diff --git a/.github/requirements.txt b/.github/requirements.txt new file mode 100644 index 0000000..de6a2ac --- /dev/null +++ b/.github/requirements.txt @@ -0,0 +1,4 @@ +distutils-pytest +git-props +pytest >=3.7.0 +setuptools diff --git a/.github/workflows/publish-to-pypi.yaml b/.github/workflows/publish-to-pypi.yaml new file mode 100644 index 0000000..66b197c --- /dev/null +++ b/.github/workflows/publish-to-pypi.yaml @@ -0,0 +1,29 @@ +name: Publish +on: + release: + types: + - published +jobs: + PyPI: + name: publish release to PyPI + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + env: + SDIST: pytest-dependency-${{ github.event.release.tag_name }}.tar.gz + steps: + - name: Fetch assets + uses: cb80/dlassets@latest + with: + tag: ${{ github.event.release.tag_name }} + to: assets + - name: Check assets + run: | + ls -la assets + - name: Copy distfile to dist directory + run: | + mkdir -p dist + cp -p assets/$SDIST dist + - name: Upload distfile to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/rst-lint.yaml b/.github/workflows/rst-lint.yaml new file mode 100644 index 0000000..b9b239f --- /dev/null +++ b/.github/workflows/rst-lint.yaml @@ -0,0 +1,17 @@ +name: Check ReST input files +on: + push: + branches: + - develop + - master + pull_request: +jobs: + doc8: + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + - name: doc8-check + uses: deep-entertainment/doc8-action@v4 + with: + scanPaths: "doc/src" diff --git a/.github/workflows/run-tests.yaml b/.github/workflows/run-tests.yaml new file mode 100644 index 0000000..5a9a95f --- /dev/null +++ b/.github/workflows/run-tests.yaml @@ -0,0 +1,38 @@ +name: Run Test +on: + push: + branches: + - develop + - master + pull_request: +jobs: + Test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + python-version: + - '3.7' + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - '3.12' + os: [ubuntu-latest] + include: + - 
python-version: '3.6' + os: ubuntu-20.04 + steps: + - name: Check out repository code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + pip install -r .github/requirements.txt + - name: Test with pytest + run: | + python setup.py test diff --git a/.gitignore b/.gitignore index 9fb4879..3ccd468 100644 --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,6 @@ -*.pyc -*~ -.cache/ __pycache__/ -/.version +/.env /MANIFEST +/_meta.py /build/ /dist/ -/doc/doctest/ -/doc/doctrees/ -/doc/html/ -/doc/latex/ -/doc/linkcheck/ -/pytest_dependency.egg-info/ -/python2_6.patch diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..726b61a --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,21 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.12" + jobs: + post_checkout: + - git fetch --unshallow + post_install: + - python setup.py build + +sphinx: + configuration: doc/src/conf.py + +python: + install: + - requirements: .rtd-require diff --git a/.rtd-require b/.rtd-require new file mode 100644 index 0000000..1742889 --- /dev/null +++ b/.rtd-require @@ -0,0 +1,5 @@ +git-props +pytest >=3.7.0 +setuptools +sphinx-copybutton +sphinx_rtd_theme diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f5745fa..0000000 --- a/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: python -python: - - "2.7" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" -install: pip install -r requirements.txt -script: make test - -# Local Variables: -# mode: yaml -# End: diff --git a/CHANGES.rst b/CHANGES.rst new file mode 100644 index 0000000..4bd0bc1 --- /dev/null +++ b/CHANGES.rst @@ -0,0 +1,216 @@ +Changelog +========= + +0.6.0 
(2023-12-31) +~~~~~~~~~~~~~~~~~~ + +Documentation +------------- + ++ `#39`_, `#41`_, `#59`_: Review documentation + +Incompatible changes +-------------------- + ++ Drop support for Python 2. + +Bug fixes and minor changes +--------------------------- + ++ `#40`_: add logging. ++ `#50`_, `#51`_: test suite incompatibility with pytest 6.2.0. ++ `#58`_: declare the type of automark_dependency ini-option correctly + as bool. + +Internal +-------- + ++ `#75`_: review build tool chain. + +.. _#39: https://github.com/RKrahl/pytest-dependency/issues/39 +.. _#40: https://github.com/RKrahl/pytest-dependency/issues/40 +.. _#41: https://github.com/RKrahl/pytest-dependency/issues/41 +.. _#50: https://github.com/RKrahl/pytest-dependency/issues/50 +.. _#51: https://github.com/RKrahl/pytest-dependency/pull/51 +.. _#58: https://github.com/RKrahl/pytest-dependency/pull/58 +.. _#59: https://github.com/RKrahl/pytest-dependency/pull/59 +.. _#75: https://github.com/RKrahl/pytest-dependency/pull/75 + +0.5.1 (2020-02-14) +~~~~~~~~~~~~~~~~~~ + +Bug fixes and minor changes +--------------------------- + ++ Fix failing documentation build. + +0.5.0 (2020-02-14) +~~~~~~~~~~~~~~~~~~ + +New features +------------ + ++ `#3`_, `#35`_: add a scope to dependencies. + (Thanks to JoeSc and selenareneephillips!) + +Incompatible changes +-------------------- + ++ Require pytest version 3.7.0 or newer. + +Bug fixes and minor changes +--------------------------- + ++ `#34`_: failing test with pytest 4.2.0 and newer. + ++ Use setuptools_scm to manage the version number. + +.. _#35: https://github.com/RKrahl/pytest-dependency/pull/35 +.. _#34: https://github.com/RKrahl/pytest-dependency/issues/34 +.. _#3: https://github.com/RKrahl/pytest-dependency/issues/3 + +0.4.0 (2018-12-02) +~~~~~~~~~~~~~~~~~~ + +Incompatible changes +-------------------- + ++ Require pytest version 3.6.0 or newer. This implicitly drops + support for Python 2.6 and for Python 3.3 and older. 
+ +Bug fixes and minor changes +--------------------------- + ++ `#24`_, `#25`_: get_marker no longer available in pytest 4.0.0. + (Thanks to Rogdham!) + ++ `#28`_: Applying markers directly in parametrize is no longer + available in 4.0. + +.. _#28: https://github.com/RKrahl/pytest-dependency/issues/28 +.. _#25: https://github.com/RKrahl/pytest-dependency/pull/25 +.. _#24: https://github.com/RKrahl/pytest-dependency/issues/24 + +0.3.2 (2018-01-17) +~~~~~~~~~~~~~~~~~~ + +Bug fixes and minor changes +--------------------------- + ++ `#5`_: properly register the dependency marker. + ++ Do not add the documentation to the source distribution. + +.. _#5: https://github.com/RKrahl/pytest-dependency/issues/5 + +0.3.1 (2017-12-26) +~~~~~~~~~~~~~~~~~~ + +Bug fixes and minor changes +--------------------------- + ++ `#17`_: Move the online documentation to Read the Docs. + ++ Some improvements in the documentation. + +.. _#17: https://github.com/RKrahl/pytest-dependency/issues/17 + +0.3 (2017-12-26) +~~~~~~~~~~~~~~~~ + +New features +------------ + ++ `#7`_: Add a configuration switch to implicitly mark all tests. + ++ `#10`_: Add an option to ignore unknown dependencies. + +Incompatible changes +-------------------- + ++ Prepend the class name to the default test name for test class + methods. This fixes a potential name conflict, see `#6`_. + + If your code uses test classes and you reference test methods by + their default name, you must add the class name. E.g. if you have + something like: + + .. code-block:: python + + class TestClass(object): + + @pytest.mark.dependency() + def test_a(): + pass + + @pytest.mark.dependency(depends=["test_a"]) + def test_b(): + pass + + you need to change this to: + + .. 
code-block:: python + + class TestClass(object): + + @pytest.mark.dependency() + def test_a(): + pass + + @pytest.mark.dependency(depends=["TestClass::test_a"]) + def test_b(): + pass + + If you override the test name in the :func:`pytest.mark.dependency` + marker, nothing needs to be changed. + +Bug fixes and minor changes +--------------------------- + ++ `#11`_: show the name of the skipped test. + (Thanks asteriogonzalez!) + ++ `#13`_: Do not import pytest in setup.py to make it compatible with + pipenv. + ++ `#15`_: tests fail with pytest 3.3.0. + ++ `#8`_: document incompatibility with parallelization in + pytest-xdist. + ++ Clarify in the documentation that Python 3.1 is not officially + supported because pytest 2.8 does not support it. There is no known + issue with Python 3.1 though. + +.. _#15: https://github.com/RKrahl/pytest-dependency/issues/15 +.. _#13: https://github.com/RKrahl/pytest-dependency/issues/13 +.. _#11: https://github.com/RKrahl/pytest-dependency/pull/11 +.. _#10: https://github.com/RKrahl/pytest-dependency/issues/10 +.. _#8: https://github.com/RKrahl/pytest-dependency/issues/8 +.. _#7: https://github.com/RKrahl/pytest-dependency/issues/7 +.. _#6: https://github.com/RKrahl/pytest-dependency/issues/6 + +0.2 (2017-05-28) +~~~~~~~~~~~~~~~~ + +New features +------------ + ++ `#2`_: Add documentation. + ++ `#4`_: Add a depend() function to add a dependency to a test at + runtime. + +.. _#4: https://github.com/RKrahl/pytest-dependency/issues/4 +.. _#2: https://github.com/RKrahl/pytest-dependency/issues/2 + +0.1 (2017-01-29) +~~~~~~~~~~~~~~~~ + ++ Initial release as an independent Python module. + + This code was first developed as part of a larger package, + `python-icat`_, at Helmholtz-Zentrum Berlin für Materialien und + Energie. + + ..
_python-icat: https://github.com/icatproject/python-icat diff --git a/MANIFEST.in b/MANIFEST.in index e5caef0..def022d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,7 +1,8 @@ -include .version +include CHANGES.rst include LICENSE.txt include MANIFEST.in include README.rst +include _meta.py include doc/examples/*.py include tests/conftest.py include tests/pytest.ini diff --git a/Makefile b/Makefile index 7d086ac..74a5ba7 100644 --- a/Makefile +++ b/Makefile @@ -1,34 +1,31 @@ -PYTHON = python -BUILDDIR = $(CURDIR)/build +PYTHON = python3 +BUILDLIB = $(CURDIR)/build/lib build: $(PYTHON) setup.py build -test: build - PYTHONPATH=$(BUILDDIR)/lib $(PYTHON) -m pytest tests +test: + $(PYTHON) setup.py test sdist: $(PYTHON) setup.py sdist -doc-html: .version - $(MAKE) -C doc html +doc-html: build + $(MAKE) -C doc html PYTHONPATH=$(BUILDLIB) clean: - rm -f *~ tests/*~ rm -rf build - $(MAKE) -C doc clean + rm -rf __pycache__ distclean: clean - rm -rf .cache tests/.cache .pytest_cache tests/.pytest_cache - rm -f *.pyc tests/*.pyc - rm -rf __pycache__ tests/__pycache__ - rm -f MANIFEST .version + rm -f MANIFEST _meta.py rm -rf dist - rm -rf pytest_dependency.egg-info + rm -rf tests/.pytest_cache $(MAKE) -C doc distclean -.version: - $(PYTHON) setup.py check +meta: + $(PYTHON) setup.py meta -.PHONY: build test sdist doc-html clean distclean + +.PHONY: build test sdist doc-html clean distclean meta diff --git a/README.rst b/README.rst index 947a268..94b5add 100644 --- a/README.rst +++ b/README.rst @@ -1,84 +1,45 @@ -.. image:: https://travis-ci.org/RKrahl/pytest-dependency.svg?branch=master - :target: https://travis-ci.org/RKrahl/pytest-dependency +|gh-test| |rtd| |pypi| -pytest-dependency - Manage dependencies of tests -================================================ - -This pytest plugin manages dependencies of tests. It allows to mark -some tests as dependent from other tests. These tests will then be -skipped if any of the dependencies did fail or has been skipped. 
- - -Download --------- - -The latest release version can be found at PyPI, see - - https://pypi.python.org/pypi/pytest_dependency - - -System requirements -------------------- - -+ Python 2.7 or 3.4 and newer. -+ `setuptools`_. -+ `pytest`_ 3.6.0 or newer. - -Optional library packages: - -+ `setuptools_scm`_ - - The version number is managed using this package. All source - distributions add a static text file with the version number and - fall back using that if `setuptools_scm` is not available. So this - package is only needed to build out of the plain development source - tree as cloned from GitHub. +.. |gh-test| image:: https://github.com/RKrahl/pytest-dependency/actions/workflows/run-tests.yaml/badge.svg + :target: https://github.com/RKrahl/pytest-dependency/actions/workflows/run-tests.yaml + :alt: GitHub Workflow Status + +.. |rtd| image:: https://img.shields.io/readthedocs/pytest-dependency/latest + :target: https://pytest-dependency.readthedocs.io/en/latest/ + :alt: Documentation build status +.. |pypi| image:: https://img.shields.io/pypi/v/pytest-dependency + :target: https://pypi.org/project/pytest-dependency/ + :alt: PyPI version -Installation ------------- - -1. Download the sources, unpack, and change into the source directory. - -2. Build (optional):: - - $ python setup.py build - -3. Test (optional):: - - $ python -m pytest - -4. Install:: - - $ python setup.py install - -The last step might require admin privileges in order to write into -the site-packages directory of your Python installation. +pytest-dependency – Manage dependencies of tests +================================================ +This module is a plugin for the popular Python testing framework +`pytest`_. It manages dependencies of tests: you may mark some tests +as dependent from other tests. These tests will then be skipped if +any of the dependencies did fail or has been skipped. 
Documentation ------------- -The documentation can be found at - - https://pytest-dependency.readthedocs.io/ +See the `online documentation`__. The example test modules used in the documentation can be found in doc/examples in the source distribution. +.. __: `Read the Docs site`_ + Copyright and License --------------------- -- Copyright 2013-2015 +- Copyright 2013–2015 Helmholtz-Zentrum Berlin für Materialien und Energie GmbH -- Copyright 2016-2020 Rolf Krahl - -Licensed under the Apache License, Version 2.0 (the "License"); you -may not use this file except in compliance with the License. You may -obtain a copy of the License at +- Copyright 2016–2023 Rolf Krahl - http://www.apache.org/licenses/LICENSE-2.0 +Licensed under the `Apache License`_, Version 2.0 (the "License"); you +may not use this file except in compliance with the License. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -87,6 +48,6 @@ implied. See the License for the specific language governing permissions and limitations under the License. -.. _setuptools: http://pypi.python.org/pypi/setuptools/ .. _pytest: http://pytest.org/ -.. _setuptools_scm: https://github.com/pypa/setuptools_scm/ +.. _Read the Docs site: https://pytest-dependency.readthedocs.io/ +.. _Apache License: https://www.apache.org/licenses/LICENSE-2.0 diff --git a/doc/.gitignore b/doc/.gitignore new file mode 100644 index 0000000..431ef05 --- /dev/null +++ b/doc/.gitignore @@ -0,0 +1,18 @@ +/devhelp/ +/dirhtml/ +/doctrees/ +/epub/ +/gettext/ +/html/ +/htmlhelp/ +/json/ +/latex/ +/linkcheck/ +/man/ +/pickle/ +/pseudoxml/ +/qthelp/ +/singlehtml/ +/texinfo/ +/text/ +/xml/ diff --git a/doc/Makefile b/doc/Makefile index 308a023..2d289ba 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,48 +1,27 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build -PAPER = a4 - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) src - -# Subdirectories of src that are supposed to be there but that may be -# empty and may thus be missing after a git checkout. -SRCDIRS = src/_static src/_templates - +SPHINXPROJ = pytest-dependency +SOURCEDIR = src +BUILDDIR = . +BUILDERS = html dirhtml singlehtml htmlhelp qthelp devhelp epub \ + latex latexpdf man texinfo text gettext linkcheck xml \ + json pickle -html: $(SRCDIRS) - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) html +# Subdirectories of the source directory that are supposed to be there +# but that may be empty and may thus be missing after a git checkout. +STATIC_SOURCEDIRS = $(SOURCEDIR)/_static $(SOURCEDIR)/_templates -latex: $(SRCDIRS) - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) latex -latexpdf: latex - $(MAKE) -C latex all-pdf +help: + $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -linkcheck: $(SRCDIRS) - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) linkcheck +$(BUILDERS): $(STATIC_SOURCEDIRS) + $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -doctest: $(SRCDIRS) - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) doctest - -clean: - rm -f *~ examples/*~ src/*~ - -distclean: clean - rm -rf doctrees html latex linkcheck doctest - rm -f examples/*.pyc - rm -rf examples/__pycache__ - -src/_static: - mkdir $@ +distclean: + rm -rf doctrees $(BUILDERS) -src/_templates: +$(STATIC_SOURCEDIRS): mkdir $@ -.PHONY: html latex latexpdf linkcheck doctest clean distclean +.PHONY: help distclean $(BUILDERS) diff --git a/doc/examples/debugging-logging.out b/doc/examples/debugging-logging.out new file mode 100644 index 0000000..5fe0db0 --- /dev/null +++ b/doc/examples/debugging-logging.out @@ -0,0 +1,325 @@ +$ pytest --log-cli-format='%(levelname)s: %(message)s' --log-cli-level=DEBUG debugging.py 
+============================= test session starts ============================== +platform linux -- Python 3.10.1, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 +rootdir: /home/user +plugins: dependency-0.6.0 +collected 25 items + +debugging.py::test_a +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_a passed in session scope +DEBUG: register setup test_a passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_a skipped in session scope +DEBUG: register call test_a skipped in module scope +XFAIL (deliberate fail) [ 4%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_a passed in session scope +DEBUG: register teardown test_a passed in module scope + +debugging.py::test_b +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_b passed in session scope +DEBUG: register setup test_b passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_b passed in session scope +DEBUG: register call test_b passed in module scope +PASSED [ 8%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_b passed in session scope +DEBUG: register teardown test_b passed in module scope + +debugging.py::test_c +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_c in module scope ... +DEBUG: ... 
test_a has not succeeded +INFO: skip test_c because it depends on test_a +DEBUG: register setup debugging.py::test_c skipped in session scope +DEBUG: register setup test_c skipped in module scope +SKIPPED (test_c depends on test_a) [ 12%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_c passed in session scope +DEBUG: register teardown test_c passed in module scope + +debugging.py::test_d +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_d in module scope ... +DEBUG: ... test_b succeeded +DEBUG: register setup debugging.py::test_d passed in session scope +DEBUG: register setup test_d passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_d passed in session scope +DEBUG: register call test_d passed in module scope +PASSED [ 16%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_d passed in session scope +DEBUG: register teardown test_d passed in module scope + +debugging.py::test_e +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_e in module scope ... +DEBUG: ... test_b succeeded +DEBUG: ... 
test_c has not succeeded +INFO: skip test_e because it depends on test_c +DEBUG: register setup debugging.py::test_e skipped in session scope +DEBUG: register setup test_e skipped in module scope +SKIPPED (test_e depends on test_c) [ 20%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_e passed in session scope +DEBUG: register teardown test_e passed in module scope + +debugging.py::TestClass::test_a +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::TestClass::test_a passed in session scope +DEBUG: register setup TestClass::test_a passed in module scope +DEBUG: register setup test_a passed in class scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::TestClass::test_a passed in session scope +DEBUG: register call TestClass::test_a passed in module scope +DEBUG: register call test_a passed in class scope +PASSED [ 24%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::TestClass::test_a passed in session scope +DEBUG: register teardown TestClass::test_a passed in module scope +DEBUG: register teardown test_a passed in class scope + +debugging.py::TestClass::test_b +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::TestClass::test_b passed in session scope +DEBUG: register setup TestClass::test_b passed in module scope +DEBUG: register setup test_b passed in class scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::TestClass::test_b skipped in session scope +DEBUG: register call TestClass::test_b skipped in module scope +DEBUG: register call test_b skipped in class scope +XFAIL (deliberate fail) [ 28%] +------------------------------ live log 
teardown ------------------------------- +DEBUG: register teardown debugging.py::TestClass::test_b passed in session scope +DEBUG: register teardown TestClass::test_b passed in module scope +DEBUG: register teardown test_b passed in class scope + +debugging.py::TestClass::test_c +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_c in module scope ... +DEBUG: ... test_b succeeded +DEBUG: register setup debugging.py::TestClass::test_c passed in session scope +DEBUG: register setup TestClass::test_c passed in module scope +DEBUG: register setup test_c passed in class scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::TestClass::test_c passed in session scope +DEBUG: register call TestClass::test_c passed in module scope +DEBUG: register call test_c passed in class scope +PASSED [ 32%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::TestClass::test_c passed in session scope +DEBUG: register teardown TestClass::test_c passed in module scope +DEBUG: register teardown test_c passed in class scope + +debugging.py::test_colors[RED] +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_colors[RED] passed in session scope +DEBUG: register setup test_colors[RED] passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_colors[RED] passed in session scope +DEBUG: register call test_colors[RED] passed in module scope +PASSED [ 36%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_colors[RED] passed in session scope +DEBUG: register teardown test_colors[RED] passed in module scope + +debugging.py::test_colors[GREEN] 
+-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_colors[GREEN] passed in session scope +DEBUG: register setup test_colors[GREEN] passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_colors[GREEN] passed in session scope +DEBUG: register call test_colors[GREEN] passed in module scope +PASSED [ 40%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_colors[GREEN] passed in session scope +DEBUG: register teardown test_colors[GREEN] passed in module scope + +debugging.py::test_colors[BLUE] +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_colors[BLUE] passed in session scope +DEBUG: register setup test_colors[BLUE] passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_colors[BLUE] passed in session scope +DEBUG: register call test_colors[BLUE] passed in module scope +PASSED [ 44%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_colors[BLUE] passed in session scope +DEBUG: register teardown test_colors[BLUE] passed in module scope + +debugging.py::test_multicolored +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_multicolored in module scope ... +DEBUG: ... 
test_colors is unknown +INFO: skip test_multicolored because it depends on test_colors +DEBUG: register setup debugging.py::test_multicolored skipped in session scope +DEBUG: register setup test_multicolored skipped in module scope +SKIPPED (test_multicolored depends on test_colors) [ 48%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_multicolored passed in session scope +DEBUG: register teardown test_multicolored passed in module scope + +debugging.py::test_alert +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_alert in module scope ... +DEBUG: ... test_colors[Color.RED] is unknown +INFO: skip test_alert because it depends on test_colors[Color.RED] +DEBUG: register setup debugging.py::test_alert skipped in session scope +DEBUG: register setup test_alert skipped in module scope +SKIPPED (test_alert depends on test_colors[Color.RED]) [ 52%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_alert passed in session scope +DEBUG: register teardown test_alert passed in module scope + +debugging.py::test_g +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_g in module scope ... +DEBUG: ... 
test_f is unknown +INFO: skip test_g because it depends on test_f +DEBUG: register setup debugging.py::test_g skipped in session scope +DEBUG: register setup test_g skipped in module scope +SKIPPED (test_g depends on test_f) [ 56%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_g passed in session scope +DEBUG: register teardown test_g passed in module scope + +debugging.py::test_h +-------------------------------- live log setup -------------------------------- +DEBUG: register setup h passed in session scope +DEBUG: register setup h passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call h passed in session scope +DEBUG: register call h passed in module scope +PASSED [ 60%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown h passed in session scope +DEBUG: register teardown h passed in module scope + +debugging.py::test_k +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_k in module scope ... +DEBUG: ... 
test_b succeeded +DEBUG: register setup debugging.py::test_k passed in session scope +DEBUG: register setup test_k passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_k skipped in session scope +DEBUG: register call test_k skipped in module scope +SKIPPED (could not import 'fleet': No module named 'fleet') [ 64%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_k passed in session scope +DEBUG: register teardown test_k passed in module scope + +debugging.py::test_l[0] +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_l[0] passed in session scope +DEBUG: register setup test_l[0] passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_l[0] passed in session scope +DEBUG: register call test_l[0] passed in module scope +PASSED [ 68%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_l[0] passed in session scope +DEBUG: register teardown test_l[0] passed in module scope + +debugging.py::test_q[0] +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_q[0] in module scope ... +DEBUG: ... 
test_p is unknown +INFO: skip test_q[0] because it depends on test_p +DEBUG: register setup debugging.py::test_q[0] skipped in session scope +DEBUG: register setup test_q[0] skipped in module scope +SKIPPED (test_q[0] depends on test_p) [ 72%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_q[0] passed in session scope +DEBUG: register teardown test_q[0] passed in module scope + +debugging.py::test_l[1] +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_l[1] passed in session scope +DEBUG: register setup test_l[1] passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_l[1] passed in session scope +DEBUG: register call test_l[1] passed in module scope +PASSED [ 76%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_l[1] passed in session scope +DEBUG: register teardown test_l[1] passed in module scope + +debugging.py::test_q[1] +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_q[1] in module scope ... +DEBUG: ... test_p is unknown +INFO: skip test_q[1] because it depends on test_p +DEBUG: register setup debugging.py::test_q[1] skipped in session scope +DEBUG: register setup test_q[1] skipped in module scope +SKIPPED (test_q[1] depends on test_p) [ 80%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_q[1] passed in session scope +DEBUG: register teardown test_q[1] passed in module scope + +debugging.py::test_m +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_m in session scope ... +DEBUG: ... 
test_b is unknown +INFO: skip test_m because it depends on test_b +DEBUG: register setup debugging.py::test_m skipped in session scope +DEBUG: register setup test_m skipped in module scope +SKIPPED (test_m depends on test_b) [ 84%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_m passed in session scope +DEBUG: register teardown test_m passed in module scope + +debugging.py::test_o +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_o in module scope ... +DEBUG: ... test_h is unknown +INFO: skip test_o because it depends on test_h +DEBUG: register setup debugging.py::test_o skipped in session scope +DEBUG: register setup test_o skipped in module scope +SKIPPED (test_o depends on test_h) [ 88%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_o passed in session scope +DEBUG: register teardown test_o passed in module scope + +debugging.py::test_p +-------------------------------- live log setup -------------------------------- +DEBUG: register setup debugging.py::test_p passed in session scope +DEBUG: register setup test_p passed in module scope +-------------------------------- live log call --------------------------------- +DEBUG: register call debugging.py::test_p passed in session scope +DEBUG: register call test_p passed in module scope +PASSED [ 92%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_p passed in session scope +DEBUG: register teardown test_p passed in module scope + +debugging.py::test_r +-------------------------------- live log setup -------------------------------- +DEBUG: register setup r passed in session scope +DEBUG: register setup r passed in module scope +-------------------------------- live log call --------------------------------- 
+DEBUG: register call r passed in session scope +DEBUG: register call r passed in module scope +PASSED [ 96%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown r passed in session scope +DEBUG: register teardown r passed in module scope + +debugging.py::test_s +-------------------------------- live log setup -------------------------------- +DEBUG: check dependencies of test_s in module scope ... +DEBUG: ... test_l is unknown +INFO: skip test_s because it depends on test_l +DEBUG: register setup debugging.py::test_s skipped in session scope +DEBUG: register setup test_s skipped in module scope +SKIPPED (test_s depends on test_l) [100%] +------------------------------ live log teardown ------------------------------- +DEBUG: register teardown debugging.py::test_s passed in session scope +DEBUG: register teardown test_s passed in module scope + + +================== 12 passed, 11 skipped, 2 xfailed in 0.09s =================== diff --git a/doc/examples/debugging-summary.out b/doc/examples/debugging-summary.out new file mode 100644 index 0000000..9bd860a --- /dev/null +++ b/doc/examples/debugging-summary.out @@ -0,0 +1,22 @@ +$ pytest -rs debugging.py +============================= test session starts ============================== +platform linux -- Python 3.10.1, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 +rootdir: /home/user +plugins: dependency-0.6.0 +collected 25 items + +debugging.py x.s.s.x....sss.s.s.sss..s [100%] + +=========================== short test summary info ============================ +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_c depends on test_a +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_e depends on test_c +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_multicolored depends on test_colors +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_alert depends on 
test_colors[Color.RED] +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_g depends on test_f +SKIPPED [1] debugging.py:15: could not import 'fleet': No module named 'fleet' +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_q[0] depends on test_p +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_q[1] depends on test_p +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_m depends on test_b +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_o depends on test_h +SKIPPED [1] /usr/lib/python3.10/site-packages/pytest_dependency.py:101: test_s depends on test_l +================== 12 passed, 11 skipped, 2 xfailed in 0.05s =================== diff --git a/doc/examples/debugging-verbose.out b/doc/examples/debugging-verbose.out new file mode 100644 index 0000000..2324b98 --- /dev/null +++ b/doc/examples/debugging-verbose.out @@ -0,0 +1,35 @@ +$ pytest --verbose debugging.py +============================= test session starts ============================== +platform linux -- Python 3.10.1, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /usr/bin/python3 +cachedir: .pytest_cache +rootdir: /home/user +plugins: dependency-0.6.0 +collecting ... collected 25 items + +debugging.py::test_a XFAIL (deliberate fail) [ 4%] +debugging.py::test_b PASSED [ 8%] +debugging.py::test_c SKIPPED (test_c depends on test_a) [ 12%] +debugging.py::test_d PASSED [ 16%] +debugging.py::test_e SKIPPED (test_e depends on test_c) [ 20%] +debugging.py::TestClass::test_a PASSED [ 24%] +debugging.py::TestClass::test_b XFAIL (deliberate fail) [ 28%] +debugging.py::TestClass::test_c PASSED [ 32%] +debugging.py::test_colors[RED] PASSED [ 36%] +debugging.py::test_colors[GREEN] PASSED [ 40%] +debugging.py::test_colors[BLUE] PASSED [ 44%] +debugging.py::test_multicolored SKIPPED (test_multicolored depends o...) 
[ 48%] +debugging.py::test_alert SKIPPED (test_alert depends on test_colors[...) [ 52%] +debugging.py::test_g SKIPPED (test_g depends on test_f) [ 56%] +debugging.py::test_h PASSED [ 60%] +debugging.py::test_k SKIPPED (could not import 'fleet': No module na...) [ 64%] +debugging.py::test_l[0] PASSED [ 68%] +debugging.py::test_q[0] SKIPPED (test_q[0] depends on test_p) [ 72%] +debugging.py::test_l[1] PASSED [ 76%] +debugging.py::test_q[1] SKIPPED (test_q[1] depends on test_p) [ 80%] +debugging.py::test_m SKIPPED (test_m depends on test_b) [ 84%] +debugging.py::test_o SKIPPED (test_o depends on test_h) [ 88%] +debugging.py::test_p PASSED [ 92%] +debugging.py::test_r PASSED [ 96%] +debugging.py::test_s SKIPPED (test_s depends on test_l) [100%] + +================== 12 passed, 11 skipped, 2 xfailed in 0.06s =================== diff --git a/doc/examples/debugging.py b/doc/examples/debugging.py new file mode 100644 index 0000000..1627015 --- /dev/null +++ b/doc/examples/debugging.py @@ -0,0 +1,113 @@ +from enum import Enum +import pytest + + +class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + def __str__(self): + return self.name + + +def get_starship(name): + fleet = pytest.importorskip("fleet") + return fleet.get_ship(name) + + +@pytest.fixture(scope="module", params=range(2)) +def prepenv(request): + pass + +@pytest.mark.dependency() +@pytest.mark.xfail(reason="deliberate fail") +def test_a(): + assert False + +@pytest.mark.dependency() +def test_b(): + pass + +@pytest.mark.dependency(depends=["test_a"]) +def test_c(): + pass + +@pytest.mark.dependency(depends=["test_b"]) +def test_d(): + pass + +@pytest.mark.dependency(depends=["test_b", "test_c"]) +def test_e(): + pass + + +class TestClass(object): + + @pytest.mark.dependency() + def test_a(self): + pass + + @pytest.mark.dependency() + @pytest.mark.xfail(reason="deliberate fail") + def test_b(self): + assert False + + @pytest.mark.dependency(depends=["test_b"]) + def test_c(self): + pass + + 
+@pytest.mark.dependency() +@pytest.mark.parametrize("c", [ Color.RED, Color.GREEN, Color.BLUE, ]) +def test_colors(c): + pass + +@pytest.mark.dependency(depends=["test_colors"]) +def test_multicolored(): + pass + +@pytest.mark.dependency(depends=["test_colors[Color.RED]"]) +def test_alert(): + pass + +@pytest.mark.dependency(depends=["test_f"]) +def test_g(): + pass + +@pytest.mark.dependency(name="h") +def test_h(): + pass + +@pytest.mark.dependency(depends=["test_b"]) +def test_k(): + s = get_starship("NCC-1701") + +@pytest.mark.dependency() +def test_l(prepenv): + pass + +@pytest.mark.dependency(depends=["test_b"], scope='session') +def test_m(): + pass + +@pytest.mark.dependency(depends=["test_h"]) +def test_o(): + pass + +@pytest.mark.dependency() +def test_p(): + pass + +@pytest.mark.dependency(depends=["test_p"]) +def test_q(prepenv): + pass + +@pytest.mark.dependency(depends=["test_a"]) +@pytest.mark.dependency(name="r") +def test_r(): + pass + +@pytest.mark.dependency(depends=["test_l"]) +def test_s(): + pass diff --git a/doc/examples/mark-class.py b/doc/examples/mark-class.py new file mode 100644 index 0000000..7cb944a --- /dev/null +++ b/doc/examples/mark-class.py @@ -0,0 +1,21 @@ +import pytest + + +@pytest.mark.dependency() +@pytest.mark.xfail(reason="deliberate fail") +def test_f(): + assert False + + +@pytest.mark.dependency(depends=["test_f"]) +class TestClass(object): + + def test_a(self): + pass + + @pytest.mark.dependency() + def test_b(self): + pass + + def test_c(self): + pass diff --git a/doc/examples/or_dependency.py b/doc/examples/or_dependency.py new file mode 100644 index 0000000..803e2a7 --- /dev/null +++ b/doc/examples/or_dependency.py @@ -0,0 +1,52 @@ +import pytest +from pytest_dependency import depends + +def depends_or(request, other, scope='module'): + item = request.node + for o in other: + try: + depends(request, [o], scope) + except pytest.skip.Exception: + continue + else: + return + pytest.skip("%s depends on any of %s" % 
(item.name, ", ".join(other))) + + +@pytest.mark.dependency() +def test_ap(): + pass + +@pytest.mark.dependency() +@pytest.mark.xfail(reason="deliberate fail") +def test_ax(): + assert False + +@pytest.mark.dependency() +def test_bp(): + pass + +@pytest.mark.dependency() +@pytest.mark.xfail(reason="deliberate fail") +def test_bx(): + assert False + +@pytest.mark.dependency() +def test_c(request): + depends_or(request, ["test_ax", "test_bx"]) + pass + +@pytest.mark.dependency() +def test_d(request): + depends_or(request, ["test_ax", "test_bp"]) + pass + +@pytest.mark.dependency() +def test_e(request): + depends_or(request, ["test_ap", "test_bx"]) + pass + +@pytest.mark.dependency() +def test_f(request): + depends_or(request, ["test_ap", "test_bp"]) + pass diff --git a/doc/src/08A1264175343E6E.pub b/doc/src/08A1264175343E6E.pub new file mode 100644 index 0000000..6ec4cf1 --- /dev/null +++ b/doc/src/08A1264175343E6E.pub @@ -0,0 +1,98 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFgqIDYBEADs08vJtPmKgfaSkKTtHWaYOmCchuXtaTd766b3tDaL55XfTucS +OGVUk1hbaMl8aAZoEmwCDIIMFUGL21QNdZTPLOWv29qXNnUjQEB7vUoJvafZPzTh ++38PDkOeLCQ67J/OSHkRQvEFGc6EXNo/GbtZ1oAGQXFeMbcFg7EuEQvEbHxyJAMo +6a+q9fHB/YkTJVkMFkPnBw2Lbywa5LVJ6ZZWk+IyxbeIZhcjTkpaZMSQhNJAF14q +4uKOGhVupfKZmvwnEvrNwp5ImTiiu6mRuywVDwxXUVersCuIhL/ZQeKI1ImboxW3 +v3p0msxzPaGi0tyb4H7mi/W5rat0u+Zm2EaxYjJDADaNSluxZweUgCi2V/tCzZtP +FgnElFYPHphay8rZ8bbcw5YfcFrC9tdF0sDD1p7qmpOnVlKPw67RwS148+ew7sxy +gW56qJP6AgbVVInJU2XNziphpPBbOYkPhYEa0HsAO62ubESPHJHTHuaMe0m+BbNh +bS2jtPrCaSC5ci6fsVsz0nl5zIFmpDOWKzHNjl5rCcQeV6WXCXeW+f8OfFTe0BXb +nn2qTq2nY+QMNXMiJ0RKwD/XCCphXLlkN5Oflt3JBAEMFMDbptUf8Zd74+xrdjQj +byFu3FLB+3h1oMX+7heoul4LQVbE/qH/fkUIPsSky51I4XWLlgnVy1PQKwARAQAB +tB1Sb2xmIEtyYWhsIDxyb2xmQHJvdGtyYXV0LmRlPokCPAQTAQIAJgIbAwcLCQgH +AwIBBhUIAgkKCwQWAgMBAh4BAheABQJYKisKAhkBAAoJEAihJkF1ND5uLzQQALnu +VyxylxLRlSQ16KAA8mfX2BVcC+3IMj9IyYYIj5Vt7BXbFtBS+sAKQ9sqXVpHzob0 +s5Rfat6sFimO0TDeNlhnS1hRZ1USQJUJ+V8c3L6OmpyNemG3/6Qi0iNYazfE71TA 
+Km4mW3wgPq8H0/hnKe7tEggPkrhENbo7JDideQ1wAVT9KFu49M2Vm80hUVHiEQh+ +1VbThRmzJjINCiYALcHMo7ZO5uNBenn9zetlKAYpLnRApmPuyAi0jisheJs1gap5 +EtDbcxsD1bqtOKVnJIWqAB7yJDThdDENyo93y2w9BMycobNl3eL7MoB6Ep+Rc8Ae +XIF/HwFtK0j84B+2dUvUHOQZuX3Qtlgu1FenwlOe8u0nwwur1q/Nz7YWg7anagn9 +yA+uDAsO5eclGAqTT5PHjSaEiPtANmRRKijCeHjB7OVTgtT1nY6ThFurDeFnk55J +mTmcaQg9ZPSKeCfWwC+BX+p/9V9Pz5LVa9tyFdxHJERxze4CrwcQkQLFSH+CavLa +iWDLyiAydEa0XU6MU5EmrkwUIHLKzC7ZzwuLkQhC8PXZCifTSkHP7gH4uUUMo2nK +6JQnRC+ylOlyBWSVlzkhYXlKcAbmrw095z70OKx4GVFyBfi3PLjceHjNvA/QPCQR +FIEh65MUYDtSz7LpUwvFYUWVFSuV7zzHlHiT3kYIiEYEExECAAYFAlgqLe8ACgkQ +UcvGPyCdlGbVYwCfYse0zET860S+E71M3Tlfp4brzOIAoOlm+m7wGU6DzTjGyGCa +zPLpoBq4iQEcBBMBAgAGBQJYKi5TAAoJEIPzNkMsf8yRK+sIAKC9HqZY/f5po06Z +gy1rWF5qDBA5GxehwYiD3kZ1EhbINjPbujNLIvpzTce2T4mhu1Ibk8fahu/Viwjy +7n0b0zf9xSiQuUVftopHOf0jfx7oGCVTOMbcENV/UN7SoXCBiKMDDlcfd9Kdt8Ft +nGyqxod3snJMn6G1VKTBCyLECh7BvyBE4oOQhyPgFYrIKwymOq+mBSPd9j3QjvRu +xpqteVOr32IYDYNbcxPIu2q6uydnjZxXdlvuo0xl8wwJgka4zGKj0DboajZyMEvR +OKbs5dQ6pwzRnxvJMxWSB/q4MOWOvtJcVyJP2eYLgE/dcMgN8UgUUjkcJ8G2eSv2 +DgunBHu0H1JvbGYgS3JhaGwgPHJvbGZAdW5pLWJyZW1lbi5kZT6JAjkEEwECACMF +AlgqIb8CGwMHCwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRAIoSZBdTQ+biHz +D/4pB8FRxJdNYegk5B1HlwzKBrmR/9HW6iJa4MinxOyiPNew3mQRzw/FBM9w1F8h +TL5s+28u/swjwntNLbI5QMYQ+JcSRgmkmzP0lPSS0i+fPm/QjpAsZ/8HTAIUIVPX +6HHOUbCtcqKVePeSIxuubF0QX2dB4uu2BhEdOcaqBkopL0QMTZx4qAaGfh0tKWc5 +Q6genqXmD24cCSezfGwnQrd/2KvgyWkf7OltZHnyOUmoaAamujkttbtVETeybbY7 +GG73nc/Nv40ypzLQmqK6GK2pUmMoM2k3KdsInxTuBWsKjcGLU/FfvPSMsXwaaCHx +N+QByU1d3rc/RDkrw6oHnrGqXQDwvKGpJ3bcAaUpshI/ywRPUUiMOlUPvmMHg4wV +nKSrwJs+9hsf6v930E3Wku3Ohuqf8I7AaMFSSop8McemZQuoMgGSzCuF8rdOlvgj +wY6mOuQ8jSUtvNi6+8NC+tL1DM4p90fx/kn4vI6TJ+34lZ/hMiRj6JH4bbTLEfdS +PL3M3vmh7xl6im2WsBnJaCFpmml7sgIGWIoy8A8dAOEkfmb7kZtGYZwqBSIex4Hu +WdfeEQifzoxBX2YAJjYdMJUUf217+x06s1ML4uChg9csBrphXnMmTzHFXsdI78hh +Cf/f7I/o+mAO49ww4DaZV1SQXjoFQ6iCamsiJaH1nKhnXYhGBBMRAgAGBQJYKi30 +AAoJEFHLxj8gnZRmG5AAoOM1SRMiNBJeR5OeYwsBGuan0ElmAJ4sb+N0emeAImsJ 
+k1JH3Fuht4/ixIkBGwQTAQIABgUCWCouWAAKCRCD8zZDLH/MkZFMB/QMFHwGT1km +DFDmZKkChCWnL0P5aopsMaCxqx3Ru5aqO87QDRjLOHSK8Emt5sp0Pz1FGJxAUOby +bCll0yG9qFSOU51QQQYEKUSELpMyT4/PdmzGPA8yYclrnq/BJ1cwPnZjw4soRZrL +PiFRYvSb16PF29xGTQF0ejFjSnBnCMNGeoIExtlW5hyGwDoZ1cJgvOqd/o5hyy5D +TxvEFtri0Bx3nCyGrERbH6HNP6Of0+D+MiH/IQKDkfMHaMiR1hHyb4facBALD66e +1dZLp3eKDQdvVZBP2NGW+BOpVLG6Zs1tbGdVCXq6CJEG+zM3xFaUNtAXck09zub3 +IF+F44NY+yu0H1JvbGYgS3JhaGwgPFJvbGYuS3JhaGxAZ214Lm5ldD6JAjkEEwEC +ACMFAlgqIZ4CGwMHCwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRAIoSZBdTQ+ +bjG8EADiP2oUqpgTz80cy193k7Lo+NEvcEkNgh7bc6W7dLwz7vL7cMBQ9K0qAQjq +TeGHucOM5ju7iDDPOVhchRJFCRl+rg35WaFCmiDd2x4j8c28sSrI+znj9dw5vLfk +8dShS6Ux6ocgLHwd5jh2ErAb3AKEGyVysA+0wydT/44jiLWjRSEF6qIDTmuxsc3L ++/f2ZBHtwRMI2fHWuosXY7Wkb8XTq/4kGgEL/DJrs0AQv/Ww17hfQhf1SYAQSgEK +DC90KsQzJzvAgsplQcJovwKFEgQUZ6ox5x/jhqnj1It0WYALJ4s0qUKOLqi+RKcZ +swpBb0Aq45UMu0KJ+EKWtGG6snfu/d00K+KncS8qBav7ciZ5EP13TdKJNwOcKd8i +NiSnP86yvNHZ5gST/++M8wB9NjhHbf50+JxQ0lJTg6o8rW2IdTjiHUXf5Z3YmwgS +WmH7byhn1leTces2NfsCGKN6j6/0YwKUyOG8XpgAiY34TLXMZbI4zsV07aJY9vRY +Vn45Co0nwOBbl3WMattWg94myQrIyMkEnETStzsO7s/jOIVW8bS4OXdOZg8DY9PE +71k8XQA9Puc5NucYioFosyNWikGoU058qoTQzVXpSgoI9cdTniZGfHoguBqr7X2L +NaX75L5dnwPepM1Z5iFl3P5aBB6rnkW5ZEkftXTOmt8doqj2SohGBBMRAgAGBQJY +Ki30AAoJEFHLxj8gnZRmd1gAoIgAm3VJlohlxaxL4Rk4JaRSvjTcAKDEq8E7j6S6 +4f8JiP2sciLBjUR814kBHAQTAQIABgUCWCouWAAKCRCD8zZDLH/MkU9HB/4p5G0P +O/u5P+hkiCPIZiiMo1DmvuQkQZEDqDobnHaxB5t2SyfdPyM9VJ5rOVSgXwYfxXSJ +3A6eHW5sHtjWqa/eD0dosY+lVJPA/dbJ9RNCPU1YgpfOZSiYtJiAIC3kRd2e4mPW +WwxJqXvTTy+T3aDwt1mfJR8SG1UP2+aiGCVbUWbo1y2EfHUBRc981pKCMERsgemh +WggvRYNkbfyuSF5Mt32beXx91W7TE7hoivKsngzbO82taD9asCSFzmZn3C5BfYth +E3iqb2EmSUbQkRzhfXk7RxoxY7RWeDodFXkRzVbwBXFY3Q7XfRNhtVA7d0O+zIWq +p2DtvLsITicrQxS8uQINBFgqIDYBEADU/dsD2NXhQVQZL12c8Yt6Nj/Ma73B/SaP +wSmyF7f0pPO3sLO0r5wOebolwbGytIh52G/7i1ZdFNJ+D06tbGNc5CUgkgcNTY+x +ND4zazWoaooEsx2TPYqDqSmJbEjeDvr//QvCQ8x7ky1p/iAEvCuFSFiAH0HGXuBU +MiM/MoBC7W3tyS9pbDy43+uphwO0A24Mrxem84zvHBFBFI/pinrp4laU1PMVaSSc 
+q82ENCGjXhvF8Ummq1F1XclQ+pza+uGBChz6QMG8EB2WRlFQqoR9xa+K3UmC+UO6 +fAkMx8p/LAuAABGphAAGZc011yuYS9uCh9zNFyBOakqLJRP4+faRWxWkzD8d2Cq+ +9I9CpiM0a3bC56VKPFeic8Xwvd81wDIp39eha8OQpMvSZp54j84hDP83IBl8D/Yl +vzKO3Xag1Eg0c92N1mNbLhV75oAbmUgOrJktg2bJjRaUOGN+KuwKUvTUawsIBIAW +xIZP2rLhT21ffSEYC/TZYD39O748lUgcBDH/czTfH+pf1mapjOFaLD+c4lA/hN19 +Su6RqJ8oYTMNsFNWzMmUAsWrCSFnGFhXeOW6kH7AqJ6FMHnsAiCHuhJGKi0Qe76n +wBDr0ut1XqELHK0hC+LJlHufbfsniuG8QqLzmmWr8aX64pyPU8FtB1hYG8tBjkqj +fKIbRKnsBwARAQABiQIfBBgBAgAJBQJYKiA2AhsMAAoJEAihJkF1ND5uOoYQAIgg +DsLqsjW0awdhf/nNK3haDbvYQhtmOmskcKQW7fQXHm0kqGoIliQN57fJNH88OT+9 +Rp6wYjKPblVSNIbcQ8M2nXEVkNtAtO7uC2WVcZeFNVQIsrXX22302riADy7IK51f +RILnCdyrePehxfyscw0VhJIYDx76iZVM0lNKfuZj00CLXOG76V5/T1o4uu8UAVe5 +cW/astcA+u8pU2u7dyUMcaU9kK0rDz2HI+b1CMs+EGPnEVOqML+RxGTIufYUoqSX +tWHXsMSWq6IK22A42m4E6HNtcNAKUK7jGLoOdHA0QaFVxHmajG+pUM9MWTcvZrsn +ZFDlDhyqrF4LBPLG5vk+C4HxdOIEWC+X5GrdESjYWxv14M0ZxustEGkf5VQHdbgE +W+RcqkwkEbiDjLoaFvOZu+NSy0EXxDtBTlkllBJMn01IAxAVgP8M7T/fZ7lg4uLz +RBRhm0T9/IvWjAx0wsiNr45iJ++5sJCQWxU4lWFjSrACPzCY0fP2nDMMBfkn58w/ +r0/tDFumEvBXHCVzdhLzUbetrzmNwGaZdg14ahlEnbbhQktYc+rWbE+ObX74WwHk +0ziR94MVr/9v144v0GuvBZaycUfe1ljHiPjMnjroODyZ48NjRWY83NolEaQhg+Zf +BzxtQzjcEYPj7MEcrHXhzM7gQQ8wnzhc39eK6bKv +=IIFi +-----END PGP PUBLIC KEY BLOCK----- diff --git a/doc/src/about.rst b/doc/src/about.rst index b46b4e2..c2f006d 100644 --- a/doc/src/about.rst +++ b/doc/src/about.rst @@ -69,15 +69,12 @@ those results that really matter. Copyright and License --------------------- -- Copyright 2013-2015 +- Copyright 2013–2015 Helmholtz-Zentrum Berlin für Materialien und Energie GmbH -- Copyright 2016-2018 Rolf Krahl +- Copyright 2016–2023 Rolf Krahl -Licensed under the Apache License, Version 2.0 (the "License"); you -may not use this file except in compliance with the License. 
You may -obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 +Licensed under the `Apache License`_, Version 2.0 (the "License"); you +may not use this file except in compliance with the License. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -87,3 +84,4 @@ permissions and limitations under the License. .. _pytest: http://pytest.org/ +.. _Apache License: https://www.apache.org/licenses/LICENSE-2.0 diff --git a/doc/src/advanced.rst b/doc/src/advanced.rst index f5ef02a..77c67d9 100644 --- a/doc/src/advanced.rst +++ b/doc/src/advanced.rst @@ -18,7 +18,7 @@ Consider the following example test module: In principle, this example works the very same way as the basic example for :ref:`usage-parametrized`. The only difference is that -the lists of paramters are dynamically compiled beforehand. The test +the lists of parameters are dynamically compiled beforehand. The test for child `l` deliberately fails, just to show the effect. As a consequence, the test for its parent `d` will be skipped. @@ -80,3 +80,39 @@ It requires the parameter values to be scalars that can easily be converted to strings. And it will fail if the same list of parameters is passed to the same test more then once, because then, pytest will add an index to the name to disambiguate the parameter values. + +Logical combinations of dependencies +------------------------------------ + +The dependencies passed as in the `depends` argument to the +:func:`pytest.mark.dependency` marker are combined in an and-like +manner: the current test is skipped unless *all* dependencies did +succeed. Sometimes one may want to combine the dependencies in a +different way. This is not supported by pytest-dependency out of the +box, but it is not difficult to implement. Consider the following +example: + +.. 
literalinclude:: ../examples/or_dependency.py + +The helper function `depends_or()` is similar to +:func:`pytest_dependency.depends`, it takes the same arguments. The +only difference is that it combines the dependencies passed in the +`other` argument in an or-like manner: the current test will be run if +*at least one* of the other tests did succeed. + +The tests `test_c`, `test_d`, `test_e`, and `test_f` in this example +all depend on two other tests. Only `test_c` will be skipped, because +all tests in its dependency list fail. The other ones are run, +because they have at least one succeeding test in their dependency +list. + +Other logical combinations of dependencies are conceivable and may be +implemented in a similar way, according to the use case at hand. + +.. note:: + The `depends_or()` helper function above is based on pytest + internals: skipping of tests works by raising an exception and the + exception class is exposed as :attr:`pytest.skip.Exception`. This + is not documented in pytest. It has been tested to work for pytest + versions 3.7.0 through 6.2.5, but it is not guaranteed to be stable + for future pytest versions. diff --git a/doc/src/changelog.rst b/doc/src/changelog.rst index ed371c1..d76c92b 100644 --- a/doc/src/changelog.rst +++ b/doc/src/changelog.rst @@ -1,129 +1 @@ -History of changes to pytest-dependency -======================================= - -0.5.1 (2020-02-14) - - Bug fixes and minor changes - + Fix failing documentation build. - -0.5.0 (2020-02-14) - New features - + `#3`_, `#35`_: add a scope to dependencies. - (Thanks to JoeSc and selenareneephillips!) - - Bug fixes and minor changes - + `#34`_: failing test with pytest 4.2.0 and newer. - + Use setuptools_scm to manage the version number. - -.. _#3: https://github.com/RKrahl/pytest-dependency/issues/3 -.. _#34: https://github.com/RKrahl/pytest-dependency/issues/34 -.. 
_#35: https://github.com/RKrahl/pytest-dependency/pull/35 - -0.4.0 (2018-12-02) - Incompatible changes - + Require pytest version 3.6.0 or newer. This implicitly drops - support for Python 2.6 and for Python 3.3 and older. - - Bug fixes and minor changes - + `#24`_, `#25`_: get_marker no longer available in pytest 4.0.0. - (Thanks to Rogdham!) - + `#28`_: Applying markers directly in parametrize is no - longer available in 4.0. - -.. _#24: https://github.com/RKrahl/pytest-dependency/issues/24 -.. _#25: https://github.com/RKrahl/pytest-dependency/pull/25 -.. _#28: https://github.com/RKrahl/pytest-dependency/issues/28 - -0.3.2 (2018-01-17) - Bug fixes and minor changes - + `#5`_: properly register the dependency marker. - + Do not add the documentation to the source distribution. - -.. _#5: https://github.com/RKrahl/pytest-dependency/issues/5 - -0.3.1 (2017-12-26) - Bug fixes and minor changes - + `#17`_: Move the online documentation to Read the Docs. - + Some improvements in the documentation. - -.. _#17: https://github.com/RKrahl/pytest-dependency/issues/17 - -0.3 (2017-12-26) - New features - + `#7`_: Add a configuration switch to implicitly mark all - tests. - + `#10`_: Add an option to ignore unknown dependencies. - - Incompatible changes - + Prepend the class name to the default test name for test class - methods. This fixes a potential name conflict, see `#6`_. - - If your code uses test classes and you reference test methods - by their default name, you must add the class name. E.g. if - you have something like: - - .. code-block:: python - - class TestClass(object): - - @pytest.mark.dependency() - def test_a(): - pass - - @pytest.mark.dependency(depends=["test_a"]) - def test_b(): - pass - - you need to change this to: - - .. 
code-block:: python - - class TestClass(object): - - @pytest.mark.dependency() - def test_a(): - pass - - @pytest.mark.dependency(depends=["TestClass::test_a"]) - def test_b(): - pass - - If you override the test name in the pytest.mark.dependency() - marker, nothing need to be changed. - - Bug fixes and minor changes - + `#11`_: show the name of the skipped test. - (Thanks asteriogonzalez!) - + `#13`_: Do not import pytest in setup.py to make it - compatible with pipenv. - + `#15`_: tests fail with pytest 3.3.0. - + `#8`_: document incompatibility with parallelization in - pytest-xdist. - + Clarify in the documentation that Python 3.1 is not officially - supported because pytest 2.8 does not support it. There is no - known issue with Python 3.1 though. - -.. _#6: https://github.com/RKrahl/pytest-dependency/issues/6 -.. _#7: https://github.com/RKrahl/pytest-dependency/issues/7 -.. _#8: https://github.com/RKrahl/pytest-dependency/issues/8 -.. _#10: https://github.com/RKrahl/pytest-dependency/issues/10 -.. _#11: https://github.com/RKrahl/pytest-dependency/pull/11 -.. _#13: https://github.com/RKrahl/pytest-dependency/issues/13 -.. _#15: https://github.com/RKrahl/pytest-dependency/issues/15 - -0.2 (2017-05-28) - New features - + `#2`_: Add documentation. - + `#4`_: Add a depend() function to add a dependency to a - test at runtime. - -.. _#2: https://github.com/RKrahl/pytest-dependency/issues/2 -.. _#4: https://github.com/RKrahl/pytest-dependency/issues/4 - -0.1 (2017-01-29) - + Initial release as an independent Python module. - - This code was first developed as part of a larger package, - python-icat, at Helmholtz-Zentrum Berlin für Materialien und - Energie, see - https://icatproject.org/user-documentation/python-icat/ +.. 
include:: ../../CHANGES.rst diff --git a/doc/src/conf.py b/doc/src/conf.py index 15eb749..5d517fc 100644 --- a/doc/src/conf.py +++ b/doc/src/conf.py @@ -1,35 +1,47 @@ +# -*- coding: utf-8 -*- # -# pytest-dependency documentation build configuration file. +# Configuration file for the Sphinx documentation builder. # -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config -import sys, os -import os.path +from pathlib import Path +import sys -# The top source directory. This file is exec'ed with its directory -# as cwd. This is "doc/src" relativ to the top source directory. So -# we need to go 2 dirs up. -topdir = os.path.dirname(os.path.dirname(os.getcwd())) +maindir = Path(__file__).resolve().parent.parent.parent +buildlib = maindir / "build" / "lib" +sys.path[0] = str(buildlib) +sys.dont_write_bytecode = True -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-#sys.path.insert(0, os.path.abspath('.')) +import pytest_dependency -# -- General configuration ----------------------------------------------------- +# -- Project information ----------------------------------------------------- + +project = 'pytest-dependency' +copyright = '2016–2023, Rolf Krahl' +author = 'Rolf Krahl' + +# The full version, including alpha/beta/rc tags +release = pytest_dependency.__version__ +# The short X.Y version +version = ".".join(release.split(".")[0:2]) -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest'] +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx_copybutton', +] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -37,208 +49,123 @@ # The suffix of source filenames. source_suffix = '.rst' -# The encoding of source files. -#source_encoding = 'utf-8-sig' - # The master toctree document. master_doc = 'index' -# General information about the project. -project = u'pytest-dependency' -copyright = u'2016-2020, Rolf Krahl' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -with open(os.path.join(topdir, ".version"), "rt") as f: - release = f.read() -version = ".".join(release.split(".")[0:2]) - # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . exclude_patterns = [] -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False +# -- Options for autodoc extension ------------------------------------------- -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +autodoc_member_order = 'bysource' -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# -- Options for intersphinx extension --------------------------------------- +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), + 'pytest': ('https://docs.pytest.org/en/stable/', None), +} -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. 
See the documentation for # a list of builtin themes. -html_theme = 'default' +# +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None +# +# html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-#html_split_index = False +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} # If true, links to the reST sources are added to the pages. html_show_sourcelink = False -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'pytest-dependency-doc' +htmlhelp_basename = '%s-doc' % project -# -- Options for LaTeX output -------------------------------------------------- +# -- Options for LaTeX output ------------------------------------------------ latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # + 'papersize': 'a4paper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'pytest-dependency.tex', u'pytest-dependency Documentation', - u'Rolf Krahl', 'manual'), + (master_doc, '%s.tex' % project, '%s Documentation' % project, + author, 'manual'), ] -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- +# -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'pytest-dependency', u'pytest-dependency Documentation', - [u'Rolf Krahl'], 1) + (master_doc, project, '%s Documentation' % project, + [author], 1) ] -# If true, show URL addresses after external links. -#man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------------ +# -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'pytest-dependency', u'pytest-dependency Documentation', - u'Rolf Krahl', 'pytest-dependency', - 'Call pytest from a distutils setup.py script.', 'Miscellaneous'), + (master_doc, project, '%s Documentation' % project, + author, project, 'One line description of project.', + 'Miscellaneous'), ] -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# -- Extension configuration ------------------------------------------------- diff --git a/doc/src/configuration.rst b/doc/src/configuration.rst index b917047..d89cb31 100644 --- a/doc/src/configuration.rst +++ b/doc/src/configuration.rst @@ -21,8 +21,8 @@ Configuration file options can be set in the `ini file`. minversion This is a builtin configuration option of pytest itself. Since - pytest-dependency requires pytest 3.6.0 or newer, it is recommended - to set this option accordingly, either to 3.6.0 or to a newer + pytest-dependency requires pytest 3.7.0 or newer, it is recommended + to set this option accordingly, either to 3.7.0 or to a newer version, if required by your test code. automark_dependency @@ -47,7 +47,7 @@ The following command line options are added by pytest.dependency: if any of the dependencies has been skipped or failed. E.g. dependencies that have not been run at all will be ignored. - This may be useful if you run only a subset of the testsuite and + This may be useful if you run only a subset of the test suite and some tests in the selected set are marked to depend on other tests that have not been selected. 
diff --git a/doc/src/debugging.rst b/doc/src/debugging.rst new file mode 100644 index 0000000..edf7d7b --- /dev/null +++ b/doc/src/debugging.rst @@ -0,0 +1,178 @@ +Debugging guide +=============== + +It's just too easy to make errors. Sometimes, it's not obvious to +understand the behavior of pytest-dependency: tests get skipped +although their dependencies succeed or the other way round, the +dependency marker does not seem to have any effect and tests are executed +although their dependencies fail. + +This section intends to give an overview of the tools that pytest +provides to investigate the situation in detail, which hopefully helps +to understand what happens. + +Example +------- + +We consider the following example in this guide: + +.. literalinclude:: ../examples/debugging.py + +This example contains several cases where the presumably intended +behavior of the code differs from what happens in practice. We will +show below how diagnostic tools in pytest may be used to unravel the +discrepancies. The results that may (or may not) be surprising +include: + ++ The test method `test_c` in class `TestClass` depending on `test_b` + is run, although the test method `test_b` fails. + ++ All instances of `test_colors` succeed. Yet `test_multicolored` + that only depends on `test_colors` is skipped. + ++ Similarly `test_alert` depending only on `test_colors[Color.RED]` is + skipped, although `test_colors` with the parameter value `Color.RED` + succeeds. + ++ `test_k` depending only on `test_b` is skipped, although `test_b` + succeeds. + ++ Same with `test_m` depending only on `test_b` is skipped. + ++ `test_o` depending only on `test_h` is skipped, although `test_h` + succeeds. + ++ `test_q` depending only on `test_p` is skipped, although `test_p` + succeeds. + ++ `test_r` is run, although `test_a` fails. + ++ `test_s` depending only on `test_l` is skipped, although `test_l` + succeeds.
+ +Diagnostic tools +---------------- + +There are different ways to request diagnostic output from pytest. We +will discuss how they may be used to better understand the behavior of +pytest-dependency. + +pytest summary +.............. + +You can request a short summary from pytest including information on +skipped tests using the ``-rs`` `command line option`__: + +.. literalinclude:: ../examples/debugging-summary.out + +This summary indicates if a test has been skipped by pytest-dependency +in the first place. In the present example, the summary hints that +`test_k` has been skipped due to another reason, unrelated to +pytest-dependency. If the test has been skipped by pytest-dependency, +the summary displays the name of the missing dependency. + +.. __: https://docs.pytest.org/en/stable/usage.html#detailed-summary-report + +Verbose pytest output +..................... + +A list of all tests with their respective outcome will be displayed if +you call pytest with the ``--verbose`` command line option: + +.. literalinclude:: ../examples/debugging-verbose.out + +The verbose listing is particularly useful, because it shows the pytest +node id for each test, which is not always obvious. As explained in +Section :ref:`names`, this node id is the basis to form the default +test name that needs to be used to reference the test in the +dependencies. + +From this list we can understand why `test_multicolored` has been +skipped: it depends on `test_colors`. But `test_colors` is +parametrized and thus the parameter value is included in the node id. +As a result, a dependency by the name `test_colors` can not be found. +The same thing happens in the case of `test_s`: it depends on +`test_l`, but the latter uses a parametrized fixture, so it indirectly +takes a parameter value and that value must be included in the +reference for the dependency. + +In the case of `test_alert`, the parameter value is included in the +dependency `test_colors[Color.RED]`.
But in the node id as displayed +in the verbose list, that test appears as `test_colors[RED]`. Note +that `class Color` overrides the string representation operator and +that affects how the parameter value appears in the node id in this +case. + +The verbose list also displays the execution order of the tests. In +the present example, this order differs from the order in the source +code. That is the reason why both instances of `test_q` are skipped: +they are executed before the dependency `test_p`. So the outcome of +the latter is yet unknown at the moment that the dependency is +checked. + +Logging +....... + +pytest-dependency emits log messages when registering test results and +when checking dependencies for a test. You can request these log +messages to be displayed at runtime using `log command line options`__ +in the pytest call. Beware that this may produce a large amount of +output, even for medium size test suites. We will present only a few +fragments of the output here. Consider the start of that output, +covering the first test `test_a`: + +.. literalinclude:: ../examples/debugging-logging.out + :end-before: debugging.py::test_b + +It is shown how the test outcome for each of the three test phases +(setup, call, and teardown) is registered in pytest-dependency. It is +also shown which name is used to register the test outcome depending +on the scope. + +Considering the relevant fragments of the output, we can check why +`TestClass::test_c` is not skipped: + +.. literalinclude:: ../examples/debugging-logging.out + :lines: 20-31,86-116 + +The dependency `test_b` is checked in module scope. If that +dependency was meant to reference the method of the same class, it +would either need to be referenced as `test_b` in class scope or as +`TestClass::test_b` in module scope or as +`debugging.py::TestClass::test_b` in session scope. The way it is +formulated in the example, it actually references the test function +`test_b`, which succeeds. 
+ +A similar case is `test_m`: + +.. literalinclude:: ../examples/debugging-logging.out + :lines: 20-31,264-274 + +The dependency `test_b` is checked in session scope. There is no test +that matches this name. If that dependency was meant to reference the +test function `test_b` in the example, it would either need to be +referenced as `debugging.py::test_b` in session scope or as `test_b` +in module scope. + +A slightly different situation is given in the case of `test_o`: + +.. literalinclude:: ../examples/debugging-logging.out + :lines: 190-201,276-286 + +In the :func:`pytest.mark.dependency` marker for `test_h` in the +example, the name is overridden as `h`. The outcome of that test is +registered using that name. It can thus not be found by the name +`test_h`. + +Considering the case of `test_r`: + +.. literalinclude:: ../examples/debugging-logging.out + :lines: 300-310 + +That test has no dependencies. The error in the example is that the +:func:`pytest.mark.dependency` marker is applied twice to the test. +That doesn't work in pytest; only the last invocation is effective. +As a result, the second invocation, setting a name, effectively clears +the dependency list that was set in the first invocation. + +.. __: https://docs.pytest.org/en/stable/logging.html#live-logs diff --git a/doc/src/index.rst b/doc/src/index.rst index 0f034d1..d368390 100644 --- a/doc/src/index.rst +++ b/doc/src/index.rst @@ -17,6 +17,7 @@ Content of the documentation scope advanced names + debugging configuration - changelog reference + changelog diff --git a/doc/src/install.rst b/doc/src/install.rst index 9b6417a..32e4d40 100644 --- a/doc/src/install.rst +++ b/doc/src/install.rst @@ -1,12 +1,34 @@ Installation instructions ========================= +See :ref:`install-using-pip` for the short version of the install +instructions. + + System requirements ------------------- -+ Python 2.7 or 3.4 and newer. ++ Python 3.4 or newer. + `setuptools`_. -+ `pytest`_ 3.6.0 or newer.
++ `pytest`_ 3.7.0 or newer. + +Optional library packages +......................... + +These packages are not needed to install or use pytest-dependency. +They are mostly only needed by the maintainer. + ++ `git-props`_ + + This package is used to extract some metadata such as the version + number out of git, the version control system. All releases embed + that metadata in the distribution. So this package is only needed + to build out of the plain development source tree as cloned from + GitHub, but not to build a release distribution. + ++ `distutils-pytest`_ >= 0.2 + + Only needed to run the test suite. .. _install-other-packages: @@ -14,7 +36,14 @@ System requirements Interaction with other packages ------------------------------- -pytest-xdist +`pytest-order`_ + pytest-dependency is based on the assumption that dependencies are + run before the test that depends on them. If this assumption is + not satisfied in the default execution order in pytest, you may + want to have a look on pytest-order. It implements reordering of + tests and supports taking the dependencies into account. + +`pytest-xdist`_ pytest-xdist features test run parallelization, e.g. distributing tests over separate processes that run in parallel. This is based on the assumption that the tests can be run independent of each @@ -23,38 +52,79 @@ pytest-xdist you do not enable parallelization in pytest-xdist. -Download --------- +Installation +------------ -The latest release version of pytest-dependency source can be found at -PyPI, see +.. _install-using-pip: - https://pypi.python.org/pypi/pytest_dependency +Installation using pip +...................... +You can install pytest-dependency from the `Python Package Index +(PyPI)`__ using pip:: -Installation ------------- + $ pip install pytest-dependency -1. Download the sources, unpack, and change into the source directory. 
+Note that while installing from PyPI is convenient, there is no way to +verify the integrity of the source distribution, which may be +considered a security risk. -2. Build (optional):: +.. __: `PyPI site`_ + +Manual installation from the source distribution +................................................ + +Release distributions are published on the GitHub. Steps to manually +build from the source distribution: + +1. Download the sources. + + The `Release Page`__ offers download of the source distribution + ``pytest-dependency-X.X.tar.gz`` and a detached signature file + ``pytest-dependency-X.X.tar.gz.asc``, where the "X.X" is to be + replaced by the version number. + +2. Check the signature (optional). + + You may verify the integrity of the source distribution by checking + the signature:: + + $ gpg --verify pytest-dependency-0.5.1.tar.gz.asc + gpg: assuming signed data in 'pytest-dependency-0.5.1.tar.gz' + gpg: Signature made Fri Feb 14 21:58:30 2020 CET + gpg: using RSA key B4EB920861DF33F31B55A07C08A1264175343E6E + gpg: Good signature from "Rolf Krahl " [ultimate] + gpg: aka "Rolf Krahl " [ultimate] + gpg: aka "Rolf Krahl " [ultimate] + + The signature should be made by the key + :download:`0xB4EB920861DF33F31B55A07C08A1264175343E6E + <08A1264175343E6E.pub>`. The fingerprint of that key is:: + + B4EB 9208 61DF 33F3 1B55 A07C 08A1 2641 7534 3E6E + +3. Unpack and change into the source directory. + +4. Build (optional):: $ python setup.py build -3. Test (optional):: +5. Test (optional):: - $ python -m pytest + $ python setup.py test -4. Install:: +6. Install:: $ python setup.py install -The last step might require admin privileges in order to write into -the site-packages directory of your Python installation. - -For production use, it is always recommended to use the latest release -version from PyPI, see above. +.. __: `GitHub latest release`_ .. _setuptools: http://pypi.python.org/pypi/setuptools/ .. _pytest: http://pytest.org/ +.. 
_git-props: https://github.com/RKrahl/git-props +.. _distutils-pytest: https://github.com/RKrahl/distutils-pytest +.. _pytest-order: https://github.com/pytest-dev/pytest-order +.. _pytest-xdist: https://github.com/pytest-dev/pytest-xdist +.. _PyPI site: https://pypi.python.org/pypi/pytest_dependency/ +.. _GitHub latest release: https://github.com/RKrahl/pytest-dependency/releases/latest diff --git a/doc/src/scope.rst b/doc/src/scope.rst index 1e78a29..5d71802 100644 --- a/doc/src/scope.rst +++ b/doc/src/scope.rst @@ -16,8 +16,8 @@ or `'class'`. versions, all dependencies were implicitly in module scope. -Explicitely specifying the scope --------------------------------- +Explicitly specifying the scope +------------------------------- The default value for the `scope` argument is `'module'`. Thus, the very first example from Section :ref:`usage-basic` could also be diff --git a/doc/src/usage.rst b/doc/src/usage.rst index be97ef2..77838c5 100644 --- a/doc/src/usage.rst +++ b/doc/src/usage.rst @@ -21,7 +21,7 @@ test, we will get the following result: .. literalinclude:: ../examples/basic.out The first test has deliberately been set to fail to illustrate the -effect. We will get the following resuts: +effect. We will get the following results: `test_a` deliberately fails. @@ -50,7 +50,7 @@ see Section :ref:`names` for details. In some cases, it's not easy to predict the names of the node ids. For this reason, the name of the tests can be overridden by an explicit `name` argument to the marker. The names must be unique. The following example works exactly as the -last one, only the test names are explicitely set: +last one, only the test names are explicitly set: .. literalinclude:: ../examples/named.py @@ -73,6 +73,22 @@ explicit `name` argument to the :func:`pytest.mark.dependency` marker. The name of the class is prepended to the method name to form the default name for the test. 
+Applying the dependency marker to a class as a whole +---------------------------------------------------- + +The :func:`pytest.mark.dependency` marker may also be applied to a test +class as a whole. This has the same effect as applying that marker +with the same arguments to each method of the class individually. +Consider: + +.. literalinclude:: ../examples/mark-class.py + +The tests `TestClass::test_a` and `TestClass::test_c` will be skipped, +because they depend on `test_f`. But `TestClass::test_b` will be run, +because it is individually marked. The marker on the test method +overrides the marker on the class and thus effectively clears the +dependency list for `TestClass::test_b`. + .. _usage-parametrized: Parametrized tests @@ -94,7 +110,7 @@ Marking dependencies at runtime ------------------------------- Sometimes, dependencies of test instances are too complicated to be -formulated explicitely beforehand using the +formulated explicitly beforehand using the :func:`pytest.mark.dependency` marker. It may be easier to compile the list of dependencies of a test at run time. In such cases, the function :func:`pytest_dependency.depends` comes handy. 
Consider the diff --git a/python-pytest-dependency.spec b/python-pytest-dependency.spec new file mode 100644 index 0000000..5f8005d --- /dev/null +++ b/python-pytest-dependency.spec @@ -0,0 +1,60 @@ +%bcond_without tests +%global distname pytest-dependency + +%if 0%{?sle_version} >= 150500 +%global pythons python3 python311 +%else +%{?!python_module:%define python_module() python3-%{**}} +%define skip_python2 1 +%endif + +Name: python-%{distname} +Version: $version +Release: 0 +Summary: $description +License: Apache-2.0 +URL: $url +Group: Development/Languages/Python +Source: https://github.com/RKrahl/pytest-dependency/releases/download/%{version}/%{distname}-%{version}.tar.gz +BuildRequires: %{python_module base >= 3.4} +BuildRequires: %{python_module setuptools} +BuildRequires: fdupes +BuildRequires: python-rpm-macros +%if %{with tests} +BuildRequires: %{python_module distutils-pytest} +BuildRequires: %{python_module pytest >= 3.7} +%endif +Requires: python-pytest >= 3.7 +BuildArch: noarch +%python_subpackages + +%description +$long_description + + +%prep +%setup -q -n %{distname}-%{version} + + +%build +%python_build + + +%install +%python_install +%fdupes %{buildroot}%{python_sitelib} + + +%if %{with tests} +%check +%python_expand $$python setup.py test +%endif + + +%files %{python_files} +%license LICENSE.txt +%doc README.rst CHANGES.rst +%{python_sitelib}/* + + +%changelog diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 04f6903..0000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -pytest >=3.6.0 -setuptools_scm diff --git a/setup.py b/setup.py index ad74b46..f5954e8 100755 --- a/setup.py +++ b/setup.py @@ -1,4 +1,3 @@ -#! /usr/bin/python """pytest-dependency - Manage dependencies of tests This pytest plugin manages dependencies of tests. It allows to mark @@ -6,78 +5,164 @@ skipped if any of the dependencies did fail or has been skipped. 
""" -import distutils.log +import setuptools +from setuptools import setup +import setuptools.command.build_py +import distutils.command.sdist +import distutils.file_util +from distutils import log +from glob import glob import os -import os.path -import re +from pathlib import Path +from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE import string -from setuptools import setup -import setuptools.command.sdist as st_sdist try: - import setuptools_scm - version = setuptools_scm.get_version() - with open(".version", "wt") as f: - f.write(version) + import distutils_pytest + cmdclass = distutils_pytest.cmdclass +except (ImportError, AttributeError): + cmdclass = dict() +try: + import gitprops + release = str(gitprops.get_last_release()) + version = str(gitprops.get_version()) except (ImportError, LookupError): try: - with open(".version", "rt") as f: - version = f.read() - except (OSError, IOError): - distutils.log.warn("warning: cannot determine version number") - version = "UNKNOWN" + from _meta import release, version + except ImportError: + log.warn("warning: cannot determine version number") + release = version = "UNKNOWN" + +docstring = __doc__ + +class copy_file_mixin: + """Distutils copy_file() mixin. + Inject a custom version version of the copy_file() method that + does some substitutions on the fly into distutils command class + hierarchy. 
+ """ + Subst_srcs = {"src/pytest_dependency.py"} + Subst = {'DOC': docstring, 'VERSION': version} + def copy_file(self, infile, outfile, + preserve_mode=1, preserve_times=1, link=None, level=1): + if infile in self.Subst_srcs: + infile = Path(infile) + outfile = Path(outfile) + if outfile.name == infile.name: + log.info("copying (with substitutions) %s -> %s", + infile, outfile.parent) + else: + log.info("copying (with substitutions) %s -> %s", + infile, outfile) + if not self.dry_run: + st = infile.stat() + try: + outfile.unlink() + except FileNotFoundError: + pass + with infile.open("rt") as sf, outfile.open("wt") as df: + df.write(string.Template(sf.read()).substitute(self.Subst)) + if preserve_times: + os.utime(str(outfile), (st[ST_ATIME], st[ST_MTIME])) + if preserve_mode: + outfile.chmod(S_IMODE(st[ST_MODE])) + return (str(outfile), 1) + else: + return distutils.file_util.copy_file(infile, outfile, + preserve_mode, preserve_times, + not self.force, link, + dry_run=self.dry_run) -class sdist(st_sdist.sdist): - def make_release_tree(self, base_dir, files): - st_sdist.sdist.make_release_tree(self, base_dir, files) - if not self.dry_run: - src = "pytest_dependency.py" - dest = os.path.join(base_dir, src) - if hasattr(os, 'link') and os.path.exists(dest): - os.unlink(dest) - subst = {'DOC': __doc__, 'VERSION': version} - with open(src, "rt") as sf, open(dest, "wt") as df: - df.write(string.Template(sf.read()).substitute(subst)) +class meta(setuptools.Command): + description = "generate meta files" + user_options = [] + meta_template = ''' +release = "%(release)s" +version = "%(version)s" +''' + def initialize_options(self): + pass + def finalize_options(self): + pass + def run(self): + version = self.distribution.get_version() + log.info("version: %s", version) + values = { + 'release': release, + 'version': version, + } + with Path("_meta.py").open("wt") as f: + print(self.meta_template % values, file=f) +# Note: Do not use setuptools for making the source 
distribution, +# rather use the good old distutils instead. +# Rationale: https://rhodesmill.org/brandon/2009/eby-magic/ +class sdist(copy_file_mixin, distutils.command.sdist.sdist): + def run(self): + self.run_command('meta') + super().run() + subst = { + "version": self.distribution.get_version(), + "url": self.distribution.get_url(), + "description": docstring.split("\n")[0], + "long_description": docstring.split("\n", maxsplit=2)[2].strip(), + } + for spec in glob("*.spec"): + with Path(spec).open('rt') as inf: + with Path(self.dist_dir, spec).open('wt') as outf: + outf.write(string.Template(inf.read()).substitute(subst)) + +class build_py(copy_file_mixin, setuptools.command.build_py.build_py): + def run(self): + self.run_command('meta') + super().run() + + +with Path("README.rst").open("rt", encoding="utf8") as f: + readme = f.read() setup( - name='pytest-dependency', - version=version, - description='Manage dependencies of tests', - author='Rolf Krahl', - author_email='rolf@rotkraut.de', - maintainer='Rolf Krahl', - maintainer_email='rolf@rotkraut.de', - url='https://github.com/RKrahl/pytest-dependency', - license='Apache Software License 2.0', - long_description=__doc__, - project_urls={ - 'Documentation': 'https://pytest-dependency.readthedocs.io/', - 'Source Code': 'https://github.com/RKrahl/pytest-dependency', - }, - py_modules=['pytest_dependency'], - install_requires=['pytest >= 3.6.0'], - classifiers=[ - 'Development Status :: 4 - Beta', - 'Framework :: Pytest', - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Testing', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Operating System :: OS Independent', 
- 'License :: OSI Approved :: Apache Software License', + name = "pytest-dependency", + version = version, + description = "Manage dependencies of tests", + long_description = readme, + long_description_content_type = "text/x-rst", + url = "https://github.com/RKrahl/pytest-dependency", + author = "Rolf Krahl", + author_email = "rolf@rotkraut.de", + license = "Apache-2.0", + classifiers = [ + "Development Status :: 4 - Beta", + "Framework :: Pytest", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Testing", ], - entry_points={ - 'pytest11': [ - 'dependency = pytest_dependency', + project_urls = dict( + Documentation="https://pytest-dependency.readthedocs.io/", + Source="https://github.com/RKrahl/pytest-dependency", + Download=("https://github.com/RKrahl/pytest-dependency/releases/%s/" + % release), + ), + package_dir = {"": "src"}, + python_requires = ">=3.4", + py_modules = ["pytest_dependency"], + install_requires = ["setuptools", "pytest >= 3.7.0"], + entry_points = { + "pytest11": [ + "dependency = pytest_dependency", ], }, - cmdclass = {'sdist': sdist}, + cmdclass = dict(cmdclass, build_py=build_py, sdist=sdist, meta=meta), ) diff --git a/pytest_dependency.py b/src/pytest_dependency.py similarity index 89% rename from pytest_dependency.py rename to src/pytest_dependency.py index 83362ae..43224ee 100644 --- a/pytest_dependency.py +++ b/src/pytest_dependency.py @@ -2,26 +2,15 @@ __version__ = "$VERSION" +import logging import 
pytest +logger = logging.getLogger(__name__) + _automark = False _ignore_unknown = False -def _get_bool(value): - """Evaluate string representation of a boolean value. - """ - if value: - if value.lower() in ["0", "no", "n", "false", "f", "off"]: - return False - elif value.lower() in ["1", "yes", "y", "true", "t", "on"]: - return True - else: - raise ValueError("Invalid truth value '%s'" % value) - else: - return False - - class DependencyItemStatus(object): """Status of a test item in a dependency manager. """ @@ -85,16 +74,25 @@ def addResult(self, item, name, rep): raise RuntimeError("Internal error: invalid scope '%s'" % self.scope) status = self.results.setdefault(name, DependencyItemStatus()) + logger.debug("register %s %s %s in %s scope", + rep.when, name, rep.outcome, self.scope) status.addResult(rep) def checkDepend(self, depends, item): + logger.debug("check dependencies of %s in %s scope ...", + item.name, self.scope) for i in depends: if i in self.results: if self.results[i].isSuccess(): + logger.debug("... %s succeeded", i) continue + else: + logger.debug("... %s has not succeeded", i) else: + logger.debug("... 
%s is unknown", i) if _ignore_unknown: continue + logger.info("skip %s because it depends on %s", item.name, i) pytest.skip("%s depends on %s" % (item.name, i)) @@ -130,7 +128,7 @@ def depends(request, other, scope='module'): def pytest_addoption(parser): parser.addini("automark_dependency", "Add the dependency marker to all tests automatically", - default=False) + type="bool", default=False) parser.addoption("--ignore-unknown-dependency", action="store_true", default=False, help="ignore dependencies whose outcome is not known") @@ -138,7 +136,7 @@ def pytest_addoption(parser): def pytest_configure(config): global _automark, _ignore_unknown - _automark = _get_bool(config.getini("automark_dependency")) + _automark = config.getini("automark_dependency") _ignore_unknown = config.getoption("--ignore-unknown-dependency") config.addinivalue_line("markers", "dependency(name=None, depends=[]): " diff --git a/tests/conftest.py b/tests/conftest.py index 26cdc0b..388dcc7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,15 @@ +from pathlib import Path import pytest pytest_plugins = "pytester" +example_dir = (Path(__file__).parent / "../doc/examples").resolve() + +def get_example(fname): + path = example_dir / fname + assert path.is_file() + return path + @pytest.fixture def ctestdir(testdir): diff --git a/tests/pytest.ini b/tests/pytest.ini index 1f80c58..7cec0d1 100644 --- a/tests/pytest.ini +++ b/tests/pytest.ini @@ -1,2 +1,2 @@ [pytest] -minversion = 3.6 +minversion = 3.7 diff --git a/tests/test_02_simple_dependency.py b/tests/test_02_simple_dependency.py index a010f80..3c45f2c 100644 --- a/tests/test_02_simple_dependency.py +++ b/tests/test_02_simple_dependency.py @@ -29,11 +29,11 @@ def test_d(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=3, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a SKIPPED - *::test_b PASSED - *::test_c PASSED - *::test_d PASSED + result.stdout.re_match_lines(r""" + .*::test_a 
SKIPPED(?:\s+\(.*\))? + .*::test_b PASSED + .*::test_c PASSED + .*::test_d PASSED """) @@ -62,11 +62,11 @@ def test_d(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=1, skipped=3, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b SKIPPED - *::test_c SKIPPED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? """) @@ -95,11 +95,11 @@ def test_d(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=1, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b FAILED - *::test_c SKIPPED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b FAILED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? """) @@ -127,11 +127,11 @@ def test_d(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=1, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b FAILED - *::test_c SKIPPED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b FAILED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? """) @@ -162,8 +162,8 @@ def test_d(): """) result = ctestdir.runpytest("--verbose", "test_explicit_select.py::test_d") result.assert_outcomes(passed=0, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_d SKIPPED(?:\s+\(.*\))? """) @@ -195,9 +195,9 @@ def test_d(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=3, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b PASSED - *::test_c PASSED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b PASSED + .*::test_c PASSED + .*::test_d SKIPPED(?:\s+\(.*\))? 
""") diff --git a/tests/test_03_class.py b/tests/test_03_class.py index 2ebf811..64bdb6d 100644 --- a/tests/test_03_class.py +++ b/tests/test_03_class.py @@ -36,18 +36,18 @@ def test_e(self): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=2, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" - *::TestClass::test_a FAILED - *::TestClass::test_b PASSED - *::TestClass::test_c SKIPPED - *::TestClass::test_d PASSED - *::TestClass::test_e SKIPPED + result.stdout.re_match_lines(r""" + .*::TestClass::test_a FAILED + .*::TestClass::test_b PASSED + .*::TestClass::test_c SKIPPED(?:\s+\(.*\))? + .*::TestClass::test_d PASSED + .*::TestClass::test_e SKIPPED(?:\s+\(.*\))? """) def test_class_simple_named(ctestdir): """Mostly the same as test_class_simple(), but name the test methods - now explicitely. + now explicitly. """ ctestdir.makepyfile(""" import pytest @@ -76,12 +76,12 @@ def test_e(self): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=2, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" - *::TestClassNamed::test_a FAILED - *::TestClassNamed::test_b PASSED - *::TestClassNamed::test_c SKIPPED - *::TestClassNamed::test_d PASSED - *::TestClassNamed::test_e SKIPPED + result.stdout.re_match_lines(r""" + .*::TestClassNamed::test_a FAILED + .*::TestClassNamed::test_b PASSED + .*::TestClassNamed::test_c SKIPPED(?:\s+\(.*\))? + .*::TestClassNamed::test_d PASSED + .*::TestClassNamed::test_e SKIPPED(?:\s+\(.*\))? """) @@ -114,8 +114,8 @@ def test_b(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=1, skipped=1, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a FAILED - *::TestClass::test_a PASSED - *::test_b SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a FAILED + .*::TestClass::test_a PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? 
""") diff --git a/tests/test_03_multiple_dependency.py b/tests/test_03_multiple_dependency.py index aaea86b..a5e585a 100644 --- a/tests/test_03_multiple_dependency.py +++ b/tests/test_03_multiple_dependency.py @@ -54,16 +54,16 @@ def test_k(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=5, skipped=5, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a SKIPPED - *::test_b FAILED - *::test_c PASSED - *::test_d PASSED - *::test_e PASSED - *::test_f SKIPPED - *::test_g SKIPPED - *::test_h PASSED - *::test_i SKIPPED - *::test_j PASSED - *::test_k SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a SKIPPED(?:\s+\(.*\))? + .*::test_b FAILED + .*::test_c PASSED + .*::test_d PASSED + .*::test_e PASSED + .*::test_f SKIPPED(?:\s+\(.*\))? + .*::test_g SKIPPED(?:\s+\(.*\))? + .*::test_h PASSED + .*::test_i SKIPPED(?:\s+\(.*\))? + .*::test_j PASSED + .*::test_k SKIPPED(?:\s+\(.*\))? """) diff --git a/tests/test_03_param.py b/tests/test_03_param.py index 8c0e53d..62cb5ba 100644 --- a/tests/test_03_param.py +++ b/tests/test_03_param.py @@ -4,6 +4,38 @@ import pytest +def test_simple_params(ctestdir): + """Simple test for a dependency on a parametrized test. + + This example has been used in the discussion of PR #43. + """ + ctestdir.makepyfile(""" + import pytest + + _md = pytest.mark.dependency + + @pytest.mark.parametrize("x", [ 0, 1 ]) + @pytest.mark.dependency() + def test_a(x): + assert x == 0 + + @pytest.mark.parametrize("x", [ + pytest.param(0, marks=_md(depends=["test_a[0]"])), + pytest.param(1, marks=_md(depends=["test_a[1]"])), + ]) + def test_b(x): + pass + """) + result = ctestdir.runpytest("--verbose") + result.assert_outcomes(passed=2, skipped=1, failed=1) + result.stdout.re_match_lines(r""" + .*::test_a\[0\] PASSED + .*::test_a\[1\] FAILED + .*::test_b\[0\] PASSED + .*::test_b\[1\] SKIPPED(?:\s+\(.*\))? 
+ """) + + def test_multiple(ctestdir): ctestdir.makepyfile(""" import pytest @@ -40,18 +72,18 @@ def test_c(w): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=7, skipped=5, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a?0-0? PASSED - *::test_a?0-1? PASSED - *::test_a?1-0? PASSED - *::test_a?1-1? FAILED - *::test_b?1-2? PASSED - *::test_b?1-3? PASSED - *::test_b?1-4? SKIPPED - *::test_b?2-3? PASSED - *::test_b?2-4? SKIPPED - *::test_b?3-4? SKIPPED - *::test_c?1? SKIPPED - *::test_c?2? SKIPPED - *::test_c?3? PASSED + result.stdout.re_match_lines(r""" + .*::test_a\[0-0\] PASSED + .*::test_a\[0-1\] PASSED + .*::test_a\[1-0\] PASSED + .*::test_a\[1-1\] FAILED + .*::test_b\[1-2\] PASSED + .*::test_b\[1-3\] PASSED + .*::test_b\[1-4\] SKIPPED(?:\s+\(.*\))? + .*::test_b\[2-3\] PASSED + .*::test_b\[2-4\] SKIPPED(?:\s+\(.*\))? + .*::test_b\[3-4\] SKIPPED(?:\s+\(.*\))? + .*::test_c\[1\] SKIPPED(?:\s+\(.*\))? + .*::test_c\[2\] SKIPPED(?:\s+\(.*\))? + .*::test_c\[3\] PASSED """) diff --git a/tests/test_03_runtime.py b/tests/test_03_runtime.py index fd35b2b..fb23e2c 100644 --- a/tests/test_03_runtime.py +++ b/tests/test_03_runtime.py @@ -32,9 +32,9 @@ def test_d(request): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=1, skipped=3, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b SKIPPED - *::test_c SKIPPED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? """) diff --git a/tests/test_03_scope.py b/tests/test_03_scope.py index ba28377..17efe82 100644 --- a/tests/test_03_scope.py +++ b/tests/test_03_scope.py @@ -5,7 +5,7 @@ def test_scope_module(ctestdir): - """One single module, module scope is explicitely set in the + """One single module, module scope is explicitly set in the pytest.mark.dependency() marker. 
""" ctestdir.makepyfile(""" @@ -33,12 +33,12 @@ def test_e(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=2, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_module.py::test_a FAILED test_scope_module.py::test_b PASSED - test_scope_module.py::test_c SKIPPED + test_scope_module.py::test_c SKIPPED(?:\s+\(.*\))? test_scope_module.py::test_d PASSED - test_scope_module.py::test_e SKIPPED + test_scope_module.py::test_e SKIPPED(?:\s+\(.*\))? """) def test_scope_session(ctestdir): @@ -102,14 +102,14 @@ def test_h(): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=6, skipped=1, failed=2) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_session_01.py::test_a PASSED test_scope_session_01.py::test_b FAILED test_scope_session_01.py::test_c PASSED test_scope_session_01.py::TestClass::test_b PASSED test_scope_session_02.py::test_a FAILED test_scope_session_02.py::test_e PASSED - test_scope_session_02.py::test_f SKIPPED + test_scope_session_02.py::test_f SKIPPED(?:\s+\(.*\))? test_scope_session_02.py::test_g PASSED test_scope_session_02.py::test_h PASSED """) @@ -174,14 +174,14 @@ def test_h(): ctestdir.makepyfile(**srcs) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=4, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_package_a/test_01.py::test_a PASSED test_scope_package_b/test_02.py::test_c PASSED test_scope_package_b/test_02.py::test_d FAILED test_scope_package_b/test_03.py::test_e PASSED - test_scope_package_b/test_03.py::test_f SKIPPED + test_scope_package_b/test_03.py::test_f SKIPPED(?:\s+\(.*\))? test_scope_package_b/test_03.py::test_g PASSED - test_scope_package_b/test_03.py::test_h SKIPPED + test_scope_package_b/test_03.py::test_h SKIPPED(?:\s+\(.*\))? 
""") def test_scope_class(ctestdir): @@ -236,17 +236,17 @@ def test_h(self): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=5, skipped=3, failed=2) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_class.py::test_a FAILED test_scope_class.py::test_b PASSED test_scope_class.py::TestClass1::test_c PASSED test_scope_class.py::TestClass2::test_a PASSED test_scope_class.py::TestClass2::test_b FAILED - test_scope_class.py::TestClass2::test_d SKIPPED + test_scope_class.py::TestClass2::test_d SKIPPED(?:\s+\(.*\))? test_scope_class.py::TestClass2::test_e PASSED test_scope_class.py::TestClass2::test_f PASSED - test_scope_class.py::TestClass2::test_g SKIPPED - test_scope_class.py::TestClass2::test_h SKIPPED + test_scope_class.py::TestClass2::test_g SKIPPED(?:\s+\(.*\))? + test_scope_class.py::TestClass2::test_h SKIPPED(?:\s+\(.*\))? """) def test_scope_nodeid(ctestdir): @@ -360,26 +360,26 @@ def test_o(self): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=7, skipped=8, failed=0) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_nodeid.py::test_a PASSED test_scope_nodeid.py::test_b PASSED - test_scope_nodeid.py::test_c SKIPPED - test_scope_nodeid.py::test_d SKIPPED + test_scope_nodeid.py::test_c SKIPPED(?:\s+\(.*\))? + test_scope_nodeid.py::test_d SKIPPED(?:\s+\(.*\))? test_scope_nodeid.py::test_e PASSED test_scope_nodeid.py::TestClass::test_f PASSED test_scope_nodeid.py::TestClass::test_g PASSED - test_scope_nodeid.py::TestClass::test_h SKIPPED - test_scope_nodeid.py::TestClass::test_i SKIPPED - test_scope_nodeid.py::TestClass::test_j SKIPPED + test_scope_nodeid.py::TestClass::test_h SKIPPED(?:\s+\(.*\))? + test_scope_nodeid.py::TestClass::test_i SKIPPED(?:\s+\(.*\))? + test_scope_nodeid.py::TestClass::test_j SKIPPED(?:\s+\(.*\))? 
test_scope_nodeid.py::TestClass::test_k PASSED - test_scope_nodeid.py::TestClass::test_l SKIPPED - test_scope_nodeid.py::TestClass::test_m SKIPPED - test_scope_nodeid.py::TestClass::test_n SKIPPED + test_scope_nodeid.py::TestClass::test_l SKIPPED(?:\s+\(.*\))? + test_scope_nodeid.py::TestClass::test_m SKIPPED(?:\s+\(.*\))? + test_scope_nodeid.py::TestClass::test_n SKIPPED(?:\s+\(.*\))? test_scope_nodeid.py::TestClass::test_o PASSED """) def test_scope_named(ctestdir): - """Explicitely named tests are always referenced by that name, + """Explicitly named tests are always referenced by that name, regardless of the scope. """ ctestdir.makepyfile(""" @@ -467,19 +467,19 @@ def test_l(self): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=7, skipped=5, failed=0) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_named.py::test_a PASSED test_scope_named.py::test_b PASSED - test_scope_named.py::test_c SKIPPED + test_scope_named.py::test_c SKIPPED(?:\s+\(.*\))? test_scope_named.py::test_d PASSED - test_scope_named.py::test_e SKIPPED + test_scope_named.py::test_e SKIPPED(?:\s+\(.*\))? test_scope_named.py::TestClass::test_f PASSED test_scope_named.py::TestClass::test_g PASSED - test_scope_named.py::TestClass::test_h SKIPPED + test_scope_named.py::TestClass::test_h SKIPPED(?:\s+\(.*\))? test_scope_named.py::TestClass::test_i PASSED - test_scope_named.py::TestClass::test_j SKIPPED + test_scope_named.py::TestClass::test_j SKIPPED(?:\s+\(.*\))? test_scope_named.py::TestClass::test_k PASSED - test_scope_named.py::TestClass::test_l SKIPPED + test_scope_named.py::TestClass::test_l SKIPPED(?:\s+\(.*\))? 
""") def test_scope_dependsfunc(ctestdir): @@ -578,7 +578,7 @@ def test_d(self, request): """) result = ctestdir.runpytest("--verbose") result.assert_outcomes(passed=10, skipped=3, failed=3) - result.stdout.fnmatch_lines(""" + result.stdout.re_match_lines(r""" test_scope_dependsfunc_01.py::test_a PASSED test_scope_dependsfunc_01.py::test_b FAILED test_scope_dependsfunc_01.py::test_c PASSED @@ -586,13 +586,13 @@ def test_d(self, request): test_scope_dependsfunc_02.py::test_a FAILED test_scope_dependsfunc_02.py::test_b PASSED test_scope_dependsfunc_02.py::test_e PASSED - test_scope_dependsfunc_02.py::test_f SKIPPED + test_scope_dependsfunc_02.py::test_f SKIPPED(?:\s+\(.*\))? test_scope_dependsfunc_02.py::test_g PASSED test_scope_dependsfunc_02.py::test_h PASSED - test_scope_dependsfunc_02.py::test_i SKIPPED + test_scope_dependsfunc_02.py::test_i SKIPPED(?:\s+\(.*\))? test_scope_dependsfunc_02.py::test_j PASSED test_scope_dependsfunc_02.py::TestClass::test_a PASSED test_scope_dependsfunc_02.py::TestClass::test_b FAILED test_scope_dependsfunc_02.py::TestClass::test_c PASSED - test_scope_dependsfunc_02.py::TestClass::test_d SKIPPED + test_scope_dependsfunc_02.py::TestClass::test_d SKIPPED(?:\s+\(.*\))? """) diff --git a/tests/test_03_skipmsgs.py b/tests/test_03_skipmsgs.py index d501285..bfdc833 100644 --- a/tests/test_03_skipmsgs.py +++ b/tests/test_03_skipmsgs.py @@ -29,11 +29,11 @@ def test_d(): """) result = ctestdir.runpytest("--verbose", "-rs") result.assert_outcomes(passed=1, skipped=2, failed=1) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b FAILED - *::test_c SKIPPED - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b FAILED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? 
""") result.stdout.fnmatch_lines_random(""" SKIP* test_c depends on test_b diff --git a/tests/test_04_automark.py b/tests/test_04_automark.py index bcc2810..09fbc17 100644 --- a/tests/test_04_automark.py +++ b/tests/test_04_automark.py @@ -23,13 +23,16 @@ def test_b(): """) result = ctestdir.runpytest("--verbose", "-rs") result.assert_outcomes(passed=1, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? """) -def test_set_false(ctestdir): +@pytest.mark.parametrize( + "false_value", ["0", "no", "n", "False", "false", "f", "off"] +) +def test_set_false(ctestdir, false_value): """A pytest.ini is present, automark_dependency is set to false. Since automark_dependency is set to false and test_a is not @@ -38,9 +41,9 @@ def test_set_false(ctestdir): """ ctestdir.makefile('.ini', pytest=""" [pytest] - automark_dependency = false + automark_dependency = %s console_output_style = classic - """) + """ % false_value) ctestdir.makepyfile(""" import pytest @@ -53,13 +56,16 @@ def test_b(): """) result = ctestdir.runpytest("--verbose", "-rs") result.assert_outcomes(passed=1, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b SKIPPED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? """) -def test_set_true(ctestdir): +@pytest.mark.parametrize( + "true_value", ["1", "yes", "y", "True", "true", "t", "on"] +) +def test_set_true(ctestdir, true_value): """A pytest.ini is present, automark_dependency is set to false. 
Since automark_dependency is set to true, the outcome of test_a @@ -68,9 +74,9 @@ def test_set_true(ctestdir): """ ctestdir.makefile('.ini', pytest=""" [pytest] - automark_dependency = true + automark_dependency = %s console_output_style = classic - """) + """ % true_value) ctestdir.makepyfile(""" import pytest @@ -83,7 +89,7 @@ def test_b(): """) result = ctestdir.runpytest("--verbose", "-rs") result.assert_outcomes(passed=2, skipped=0, failed=0) - result.stdout.fnmatch_lines(""" - *::test_a PASSED - *::test_b PASSED + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b PASSED """) diff --git a/tests/test_04_ignore_unknown.py b/tests/test_04_ignore_unknown.py index 3cee600..bc145e5 100644 --- a/tests/test_04_ignore_unknown.py +++ b/tests/test_04_ignore_unknown.py @@ -32,8 +32,8 @@ def test_d(): """) result = ctestdir.runpytest("--verbose", "test_no_ignore.py::test_d") result.assert_outcomes(passed=0, skipped=1, failed=0) - result.stdout.fnmatch_lines(""" - *::test_d SKIPPED + result.stdout.re_match_lines(r""" + .*::test_d SKIPPED(?:\s+\(.*\))? """) @@ -67,6 +67,6 @@ def test_d(): result = ctestdir.runpytest("--verbose", "--ignore-unknown-dependency", "test_ignore.py::test_d") result.assert_outcomes(passed=1, skipped=0, failed=0) - result.stdout.fnmatch_lines(""" - *::test_d PASSED + result.stdout.re_match_lines(r""" + .*::test_d PASSED """) diff --git a/tests/test_09_examples_advanced.py b/tests/test_09_examples_advanced.py new file mode 100644 index 0000000..bd6e9c8 --- /dev/null +++ b/tests/test_09_examples_advanced.py @@ -0,0 +1,167 @@ +"""Test the included examples. 
+""" + +import pytest +from conftest import get_example + + +def test_dyn_parametrized(ctestdir): + """Dynamic compilation of marked parameters + """ + with get_example("dyn-parametrized.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=11, skipped=1, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=11, skipped=1, failed=0) + result.stdout.re_match_lines(r""" + .*::test_child\[c0\] PASSED + .*::test_child\[c1\] PASSED + .*::test_child\[c2\] PASSED + .*::test_child\[c3\] PASSED + .*::test_child\[c4\] PASSED + .*::test_child\[c5\] PASSED + .*::test_child\[c6\] PASSED + .*::test_child\[c7\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_child\[c8\] PASSED + .*::test_parent\[p0\] PASSED + .*::test_parent\[p1\] PASSED + .*::test_parent\[p2\] PASSED + .*::test_parent\[p3\] SKIPPED(?:\s+\(.*\))? + """) + + +def test_group_fixture1(ctestdir): + """Grouping tests using fixtures 1 + """ + with get_example("group-fixture.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=16, skipped=1, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=16, skipped=1, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a\[1\] PASSED + .*::test_b\[1\] PASSED + .*::test_a\[2\] PASSED + .*::test_b\[2\] PASSED + .*::test_a\[3\] PASSED + .*::test_b\[3\] PASSED + .*::test_a\[4\] PASSED + .*::test_b\[4\] PASSED + .*::test_a\[5\] PASSED + .*::test_b\[5\] PASSED + .*::test_a\[6\] PASSED + .*::test_b\[6\] PASSED + .*::test_a\[7\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b\[7\] SKIPPED(?:\s+\(.*\))? 
+ .*::test_a\[8\] PASSED + .*::test_b\[8\] PASSED + .*::test_a\[9\] PASSED + .*::test_b\[9\] PASSED + """) + + +def test_group_fixture2(ctestdir): + """Grouping tests using fixtures 2 + """ + with get_example("group-fixture2.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=24, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=24, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a\[1\] PASSED + .*::test_b\[1\] PASSED + .*::test_c\[1\] PASSED + .*::test_a\[2\] PASSED + .*::test_b\[2\] PASSED + .*::test_c\[2\] PASSED + .*::test_a\[3\] PASSED + .*::test_b\[3\] PASSED + .*::test_c\[3\] PASSED + .*::test_a\[4\] PASSED + .*::test_b\[4\] PASSED + .*::test_c\[4\] PASSED + .*::test_a\[5\] PASSED + .*::test_b\[5\] PASSED + .*::test_c\[5\] PASSED + .*::test_a\[6\] PASSED + .*::test_b\[6\] PASSED + .*::test_c\[6\] PASSED + .*::test_a\[7\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b\[7\] SKIPPED(?:\s+\(.*\))? + .*::test_c\[7\] SKIPPED(?:\s+\(.*\))? 
+ .*::test_a\[8\] PASSED + .*::test_b\[8\] PASSED + .*::test_c\[8\] PASSED + .*::test_a\[9\] PASSED + .*::test_b\[9\] PASSED + .*::test_c\[9\] PASSED + """) + + +def test_all_params(ctestdir): + """Depend on all instances of a parametrized test at once + """ + with get_example("all_params.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=20, skipped=3, failed=0, xfailed=3) + except TypeError: + result.assert_outcomes(passed=20, skipped=3, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a\[0\] PASSED + .*::test_a\[1\] PASSED + .*::test_a\[2\] PASSED + .*::test_a\[3\] PASSED + .*::test_a\[4\] PASSED + .*::test_a\[5\] PASSED + .*::test_a\[6\] PASSED + .*::test_a\[7\] PASSED + .*::test_a\[8\] PASSED + .*::test_a\[9\] PASSED + .*::test_a\[10\] PASSED + .*::test_a\[11\] PASSED + .*::test_a\[12\] PASSED + .*::test_a\[13\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_a\[14\] PASSED + .*::test_a\[15\] PASSED + .*::test_a\[16\] PASSED + .*::test_b SKIPPED(?:\s+\(.*\))? + .*::test_c\[0-2\] PASSED + .*::test_c\[2-3\] PASSED + .*::test_c\[4-4\] PASSED + .*::test_c\[6-5\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_d SKIPPED(?:\s+\(.*\))? + .*::test_e\[abc\] PASSED + .*::test_e\[def\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_f SKIPPED(?:\s+\(.*\))? + """) + + +def test_or_dependency(ctestdir): + """Logical combinations of dependencies + """ + with get_example("or_dependency.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2) + except TypeError: + result.assert_outcomes(passed=5, skipped=1, failed=0) + result.stdout.re_match_lines(r""" + .*::test_ap PASSED + .*::test_ax (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_bp PASSED + .*::test_bx (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_c SKIPPED(?:\s+\(.*\))? 
+ .*::test_d PASSED + .*::test_e PASSED + .*::test_f PASSED + """) diff --git a/tests/test_09_examples_debugging.py b/tests/test_09_examples_debugging.py new file mode 100644 index 0000000..d27f447 --- /dev/null +++ b/tests/test_09_examples_debugging.py @@ -0,0 +1,48 @@ +"""Test the included examples. +""" + +import pytest +from conftest import get_example + + +def test_debugging(ctestdir): + """Debugging example + """ + # The debugging example requires the enum module which is has been + # added to the standard library in Python 3.4. Skip this test if + # the module is not available. + _ = pytest.importorskip("enum") + with get_example("debugging.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=12, skipped=11, failed=0, xfailed=2) + except TypeError: + result.assert_outcomes(passed=12, skipped=11, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b PASSED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d PASSED + .*::test_e SKIPPED(?:\s+\(.*\))? + .*::TestClass::test_a PASSED + .*::TestClass::test_b (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClass::test_c PASSED + .*::test_colors\[RED\] PASSED + .*::test_colors\[GREEN\] PASSED + .*::test_colors\[BLUE\] PASSED + .*::test_multicolored SKIPPED(?:\s+\(.*\))? + .*::test_alert SKIPPED(?:\s+\(.*\))? + .*::test_g SKIPPED(?:\s+\(.*\))? + .*::test_h PASSED + .*::test_k SKIPPED(?:\s+\(.*\))? + .*::test_l\[0\] PASSED + .*::test_q\[0\] SKIPPED(?:\s+\(.*\))? + .*::test_l\[1\] PASSED + .*::test_q\[1\] SKIPPED(?:\s+\(.*\))? + .*::test_m SKIPPED(?:\s+\(.*\))? + .*::test_o SKIPPED(?:\s+\(.*\))? + .*::test_p PASSED + .*::test_r PASSED + .*::test_s SKIPPED(?:\s+\(.*\))? + """) diff --git a/tests/test_09_examples_names.py b/tests/test_09_examples_names.py new file mode 100644 index 0000000..7d45ba7 --- /dev/null +++ b/tests/test_09_examples_names.py @@ -0,0 +1,26 @@ +"""Test the included examples. 
+""" + +import pytest +from conftest import get_example + + +def test_nodeid(ctestdir): + """Node ids + """ + with get_example("nodeid.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=6, skipped=0, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=6, skipped=0, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b\[7-True\] PASSED + .*::test_b\[0-False\] PASSED + .*::test_b\[-1-False\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClass::test_c PASSED + .*::TestClass::test_d\[order\] PASSED + .*::TestClass::test_d\[disorder\] PASSED + """) diff --git a/tests/test_09_examples_scope.py b/tests/test_09_examples_scope.py new file mode 100644 index 0000000..7975e5a --- /dev/null +++ b/tests/test_09_examples_scope.py @@ -0,0 +1,73 @@ +"""Test the included examples. +""" + +from pathlib import Path +import pytest +from conftest import get_example + + +def test_scope_module(ctestdir): + """Explicitly specifying the scope + """ + with get_example("scope_module.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=2, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=2, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b PASSED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d PASSED + .*::test_e SKIPPED(?:\s+\(.*\))? 
+ """) + + +def test_scope_session(ctestdir): + """Dependencies in session scope + """ + subdir = Path(str(ctestdir.tmpdir)) / "tests" + subdir.mkdir() + with get_example("scope_session_mod_01.py").open("rt") as sf: + with (subdir / "test_mod_01.py").open("wt") as df: + df.write(sf.read()) + with get_example("scope_session_mod_02.py").open("rt") as sf: + with (subdir / "test_mod_02.py").open("wt") as df: + df.write(sf.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=5, skipped=1, failed=0, xfailed=2) + except TypeError: + result.assert_outcomes(passed=5, skipped=1, failed=0) + result.stdout.re_match_lines(r""" + tests/test_mod_01.py::test_a PASSED + tests/test_mod_01.py::test_b (?:XFAIL(?:\s+\(.*\))?|xfail) + tests/test_mod_01.py::test_c PASSED + tests/test_mod_01.py::TestClass::test_b PASSED + tests/test_mod_02.py::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + tests/test_mod_02.py::test_e PASSED + tests/test_mod_02.py::test_f SKIPPED(?:\s+\(.*\))? + tests/test_mod_02.py::test_g PASSED + """) + + +def test_scope_class(ctestdir): + """The class scope + """ + with get_example("scope_class.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=3, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=3, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClass1::test_b PASSED + .*::TestClass2::test_a PASSED + .*::TestClass2::test_c SKIPPED(?:\s+\(.*\))? + .*::TestClass2::test_d PASSED + .*::TestClass2::test_e SKIPPED(?:\s+\(.*\))? + """) diff --git a/tests/test_09_examples_usage.py b/tests/test_09_examples_usage.py new file mode 100644 index 0000000..6e68cce --- /dev/null +++ b/tests/test_09_examples_usage.py @@ -0,0 +1,130 @@ +"""Test the included examples. 
+""" + +import pytest +from conftest import get_example + + +def test_basic(ctestdir): + """Basic usage + """ + with get_example("basic.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=2, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=2, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b PASSED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d PASSED + .*::test_e SKIPPED(?:\s+\(.*\))? + """) + + +def test_named(ctestdir): + """Naming tests + """ + with get_example("named.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=2, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=2, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_b PASSED + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d PASSED + .*::test_e SKIPPED(?:\s+\(.*\))? + """) + + +def test_testclass(ctestdir): + """Using test classes + """ + with get_example("testclass.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=4, skipped=4, failed=0, xfailed=2) + except TypeError: + result.assert_outcomes(passed=4, skipped=4, failed=0) + result.stdout.re_match_lines(r""" + .*::TestClass::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClass::test_b PASSED + .*::TestClass::test_c SKIPPED(?:\s+\(.*\))? + .*::TestClass::test_d PASSED + .*::TestClass::test_e SKIPPED(?:\s+\(.*\))? + .*::TestClassNamed::test_a (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClassNamed::test_b PASSED + .*::TestClassNamed::test_c SKIPPED(?:\s+\(.*\))? + .*::TestClassNamed::test_d PASSED + .*::TestClassNamed::test_e SKIPPED(?:\s+\(.*\))? 
+ """) + + +def test_mark_class(ctestdir): + """Applying the dependency marker to a class as a whole + """ + with get_example("mark-class.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=1, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=1, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_f (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::TestClass::test_a SKIPPED(?:\s+\(.*\))? + .*::TestClass::test_b PASSED + .*::TestClass::test_c SKIPPED(?:\s+\(.*\))? + """) + + +def test_parametrized(ctestdir): + """Parametrized tests + """ + with get_example("parametrized.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=7, skipped=5, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=7, skipped=5, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a\[0-0\] PASSED + .*::test_a\[0-1\] (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_a\[1-0\] PASSED + .*::test_a\[1-1\] PASSED + .*::test_b\[1-2\] SKIPPED(?:\s+\(.*\))? + .*::test_b\[1-3\] PASSED + .*::test_b\[1-4\] PASSED + .*::test_b\[2-3\] SKIPPED(?:\s+\(.*\))? + .*::test_b\[2-4\] SKIPPED(?:\s+\(.*\))? + .*::test_b\[3-4\] PASSED + .*::test_c\[1\] SKIPPED(?:\s+\(.*\))? + .*::test_c\[2\] PASSED + .*::test_c\[3\] SKIPPED(?:\s+\(.*\))? + """) + + +def test_runtime(ctestdir): + """Marking dependencies at runtime + """ + with get_example("runtime.py").open("rt") as f: + ctestdir.makepyfile(f.read()) + result = ctestdir.runpytest("--verbose") + try: + result.assert_outcomes(passed=1, skipped=2, failed=0, xfailed=1) + except TypeError: + result.assert_outcomes(passed=1, skipped=2, failed=0) + result.stdout.re_match_lines(r""" + .*::test_a PASSED + .*::test_b (?:XFAIL(?:\s+\(.*\))?|xfail) + .*::test_c SKIPPED(?:\s+\(.*\))? + .*::test_d SKIPPED(?:\s+\(.*\))? + """)