From 0e558572635a891f0ff7b5bce331e49a13a58d74 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Sat, 21 Dec 2024 09:18:00 -0500
Subject: [PATCH 01/23] build: Drop Python 3.8 support

---
 .github/workflows/ci.yml | 2 +-
 .../workflows/lower-bound-requirements.yml | 2 +-
 .github/workflows/release_tests.yml | 2 +-
 .pre-commit-config.yaml | 5 +++--
 pyproject.toml | 3 +--
 src/pyhf/contrib/utils.py | 21 +++++++------------
 src/pyhf/mixins.py | 3 ++-
 src/pyhf/modifiers/staterror.py | 3 +--
 src/pyhf/parameters/paramsets.py | 4 +---
 src/pyhf/pdf.py | 4 ++--
 src/pyhf/readxml.py | 14 ++++---------
 src/pyhf/schema/loader.py | 9 +-------
 src/pyhf/schema/validator.py | 3 ++-
 src/pyhf/schema/variables.py | 8 +------
 src/pyhf/tensor/numpy_backend.py | 6 ++++--
 src/pyhf/typing.py | 9 +++-----
 src/pyhf/utils.py | 9 +-------
 tests/contrib/test_viz.py | 5 -----
 18 files changed, 36 insertions(+), 76 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a6d5d145ca..03180df6dc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -30,7 +30,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
         include:
           - os: macos-latest
             python-version: '3.13'
diff --git a/.github/workflows/lower-bound-requirements.yml b/.github/workflows/lower-bound-requirements.yml
index 190cec22a3..b4ac0bbd82 100644
--- a/.github/workflows/lower-bound-requirements.yml
+++ b/.github/workflows/lower-bound-requirements.yml
@@ -17,7 +17,7 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         # minimum supported Python
-        python-version: ['3.8']
+        python-version: ['3.9']

     steps:
     - uses: actions/checkout@v5
diff --git a/.github/workflows/release_tests.yml b/.github/workflows/release_tests.yml
index 10531df951..08dffa1347 100644
--- a/.github/workflows/release_tests.yml
+++ b/.github/workflows/release_tests.yml
@@ -22,7 +22,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
         include:
           - os: macos-latest
             python-version: '3.13'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 075396860f..ef188fe0b7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -55,14 +55,15 @@ repos:
 - repo: https://github.com/pre-commit/mirrors-mypy
   rev: v1.13.0
   # check the oldest and newest supported Pythons
+  # except skip Python 3.9, due to numpy's poor typing support there
   hooks:
   - &mypy
     id: mypy
-    name: mypy with Python 3.8
+    name: mypy with Python 3.10
     files: src
     additional_dependencies: ['numpy', 'types-tqdm', 'click', 'types-jsonpatch', 'types-pyyaml', 'types-jsonschema', 'importlib_metadata', 'packaging']
-    args: ["--python-version=3.8"]
+    args: ["--python-version=3.10"]
   - <<: *mypy
     name: mypy with Python 3.13
     args: ["--python-version=3.13"]
diff --git a/pyproject.toml b/pyproject.toml
index ba208ef848..c8668a9107 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,7 +11,7 @@ dynamic = ["version"]
 description = "pure-Python HistFactory implementation with tensors and autodiff"
 readme = "README.rst"
 license = "Apache-2.0"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 authors = [
     { name = "Lukas Heinrich", email = "lukas.heinrich@cern.ch" },
     { name = "Matthew Feickert", email = "matthew.feickert@cern.ch" },
@@ -32,7 +32,6 @@ classifiers = [
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/src/pyhf/contrib/utils.py b/src/pyhf/contrib/utils.py index 7fa9550242..a507fbc4b4 100644 --- a/src/pyhf/contrib/utils.py +++ b/src/pyhf/contrib/utils.py @@ -80,13 +80,7 @@ def download(archive_url, output_directory, force=False, compress=False): with open(output_directory, "wb") as archive: archive.write(response.content) else: - # Support for file-like objects for tarfile.is_tarfile was added - # in Python 3.9, so as pyhf is currently Python 3.8+ then can't - # do tarfile.is_tarfile(BytesIO(response.content)). - # Instead, just use a 'try except' block to determine if the - # archive is a valid tarfile. - # TODO: Simplify after pyhf is Python 3.9+ only - try: + if tarfile.is_tarfile(BytesIO(response.content)): # Use transparent compression to allow for .tar or .tar.gz with tarfile.open( mode="r:*", fileobj=BytesIO(response.content) @@ -97,13 +91,7 @@ def download(archive_url, output_directory, force=False, compress=False): archive.extractall(output_directory, filter="data") else: archive.extractall(output_directory) - except tarfile.ReadError: - if not zipfile.is_zipfile(BytesIO(response.content)): - raise exceptions.InvalidArchive( - f"The archive downloaded from {archive_url} is not a tarfile" - + " or a zipfile and so can not be opened as one." - ) - + elif zipfile.is_zipfile(BytesIO(response.content)): output_directory = Path(output_directory) if output_directory.exists(): rmtree(output_directory) @@ -129,6 +117,11 @@ def download(archive_url, output_directory, force=False, compress=False): # from creation time rmtree(output_directory) _tmp_path.replace(output_directory) + else: + raise exceptions.InvalidArchive( + f"The archive downloaded from {archive_url} is not a tarfile" + + " or a zipfile and so can not be opened as one." 
+ ) except ModuleNotFoundError: log.error( diff --git a/src/pyhf/mixins.py b/src/pyhf/mixins.py index 0314188cc1..4855e95101 100644 --- a/src/pyhf/mixins.py +++ b/src/pyhf/mixins.py @@ -1,7 +1,8 @@ from __future__ import annotations import logging -from typing import Any, Sequence +from typing import Any +from collections.abc import Sequence from pyhf.typing import Channel diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index a6d6d499c5..bb4e9f3f3e 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -1,5 +1,4 @@ import logging -from typing import List import pyhf from pyhf import events @@ -10,7 +9,7 @@ log = logging.getLogger(__name__) -def required_parset(sigmas, fixed: List[bool]): +def required_parset(sigmas, fixed: list[bool]): n_parameters = len(sigmas) return { 'paramset_type': 'constrained_by_normal', diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index 2562c89305..3a59d4a1e8 100644 --- a/src/pyhf/parameters/paramsets.py +++ b/src/pyhf/parameters/paramsets.py @@ -1,5 +1,3 @@ -from typing import List - import pyhf __all__ = [ @@ -29,7 +27,7 @@ def __init__(self, **kwargs): ) @property - def suggested_fixed(self) -> List[bool]: + def suggested_fixed(self) -> list[bool]: if isinstance(self._suggested_fixed, bool): return [self._suggested_fixed] * self.n_parameters return self._suggested_fixed diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index ca051d1652..b8cfe612bc 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -2,7 +2,7 @@ import copy import logging -from typing import List, Union +from typing import Union import pyhf.parameters import pyhf @@ -406,7 +406,7 @@ def param_set(self, name): """ return self.par_map[name]['paramset'] - def suggested_fixed(self) -> List[bool]: + def suggested_fixed(self) -> list[bool]: """ Identify the fixed parameters in the model. diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py index a694dab292..52612ae082 100644 --- a/src/pyhf/readxml.py +++ b/src/pyhf/readxml.py @@ -4,16 +4,10 @@ from typing import ( IO, Callable, - Iterable, - List, - MutableMapping, - MutableSequence, - Sequence, - Set, - Tuple, Union, cast, ) +from collections.abc import Iterable, MutableMapping, MutableSequence, Sequence import xml.etree.ElementTree as ET from pathlib import Path @@ -46,8 +40,8 @@ log = logging.getLogger(__name__) -FileCacheType = MutableMapping[str, Tuple[Union[IO[str], IO[bytes]], Set[str]]] -MountPathType = Iterable[Tuple[Path, Path]] +FileCacheType = MutableMapping[str, tuple[Union[IO[str], IO[bytes]], set[str]]] +MountPathType = Iterable[tuple[Path, Path]] ResolverType = Callable[[str], Path] __FILECACHE__: FileCacheType = {} @@ -99,7 +93,7 @@ def extract_error(hist: uproot.behaviors.TH1.TH1) -> list[float]: """ variance = hist.variances() if hist.weighted else hist.to_numpy()[0] - return cast(List[float], np.sqrt(variance).tolist()) + return cast(list[float], np.sqrt(variance).tolist()) def import_root_histogram( diff --git a/src/pyhf/schema/loader.py b/src/pyhf/schema/loader.py index 920766c4dc..0f0001faef 100644 --- a/src/pyhf/schema/loader.py +++ b/src/pyhf/schema/loader.py @@ -1,15 +1,8 @@ from pathlib import Path -import sys import json import pyhf.exceptions from pyhf.schema import variables - -# importlib.resources.as_file wasn't added until Python 3.9 -# c.f. 
https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file -if sys.version_info >= (3, 9): - from importlib import resources -else: - import importlib_resources as resources +from importlib import resources def load_schema(schema_id: str): diff --git a/src/pyhf/schema/validator.py b/src/pyhf/schema/validator.py index 2540a3d002..69230d7410 100644 --- a/src/pyhf/schema/validator.py +++ b/src/pyhf/schema/validator.py @@ -1,6 +1,7 @@ import numbers from pathlib import Path -from typing import Mapping, Union +from typing import Union +from collections.abc import Mapping import jsonschema diff --git a/src/pyhf/schema/variables.py b/src/pyhf/schema/variables.py index 80c0a0dd06..d02cc6b322 100644 --- a/src/pyhf/schema/variables.py +++ b/src/pyhf/schema/variables.py @@ -1,11 +1,5 @@ -import sys +from importlib import resources -# importlib.resources.as_file wasn't added until Python 3.9 -# c.f. https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file -if sys.version_info >= (3, 9): - from importlib import resources -else: - import importlib_resources as resources schemas = resources.files('pyhf') / "schemas" SCHEMA_CACHE = {} diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index e843330bb3..43fef6fdf4 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -3,7 +3,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Callable, Generic, Mapping, Sequence, TypeVar, Union +from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union +from collections.abc import Mapping, Sequence import numpy as np @@ -207,7 +208,8 @@ def conditional( def tolist(self, tensor_in: Tensor[T] | list[T]) -> list[T]: try: - return tensor_in.tolist() # type: ignore[union-attr,no-any-return] + # unused-ignore for [no-any-return] in python 3.9 + return tensor_in.tolist() # type: ignore[union-attr,no-any-return,unused-ignore] except AttributeError: if isinstance(tensor_in, list): return tensor_in diff --git a/src/pyhf/typing.py b/src/pyhf/typing.py index 19dd36c485..f3e4e5784c 100644 --- a/src/pyhf/typing.py +++ b/src/pyhf/typing.py @@ -2,14 +2,12 @@ from typing import ( Any, Literal, - MutableSequence, Protocol, - Sequence, SupportsIndex, - Tuple, TypedDict, Union, ) +from collections.abc import MutableSequence, Sequence __all__ = ( "Channel", @@ -35,10 +33,9 @@ ) -# TODO: Switch to os.PathLike[str] once Python 3.8 support dropped -PathOrStr = Union[str, "os.PathLike[str]"] +PathOrStr = Union[str, os.PathLike[str]] -Shape = Tuple[int, ...] +Shape = tuple[int, ...] ShapeLike = Union[SupportsIndex, Sequence[SupportsIndex]] diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index c9ad5d0185..ea2fdfa99a 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -4,14 +4,7 @@ import hashlib from gettext import gettext -import sys - -# importlib.resources.as_file wasn't added until Python 3.9 -# c.f. 
https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file -if sys.version_info >= (3, 9): - from importlib import resources -else: - import importlib_resources as resources +from importlib import resources __all__ = [ "EqDelimStringParamType", diff --git a/tests/contrib/test_viz.py b/tests/contrib/test_viz.py index f8f760931f..f7c0b29427 100644 --- a/tests/contrib/test_viz.py +++ b/tests/contrib/test_viz.py @@ -1,5 +1,4 @@ import json -import sys import matplotlib import matplotlib.pyplot as plt @@ -68,10 +67,6 @@ def test_plot_results(datadir): @pytest.mark.mpl_image_compare -@pytest.mark.xfail( - sys.version_info < (3, 8), - reason="baseline image generated with matplotlib v3.6.0 which is Python 3.8+", -) def test_plot_results_no_axis(datadir): data = json.load(datadir.joinpath("hypotest_results.json").open(encoding="utf-8")) From 719f6d9a3a7b1188f22e570ed8c8ff92d15c65f8 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 16 Oct 2025 11:15:27 +0200 Subject: [PATCH 02/23] typing: Cast transpose to ArrayLike Sonnet 4's summary: > The mypy error occurred because the transpose() method on line 659 was > returning a value that mypy inferred as Any, while the function signature > declared it should return ArrayLike. Mypy's no-any-return rule flags when > you return an Any value from a function with a specific return type annotation. --- src/pyhf/tensor/numpy_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index 43fef6fdf4..5a18824460 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -656,4 +656,4 @@ def transpose(self, tensor_in: Tensor[T]) -> ArrayLike: .. versionadded:: 0.7.0 """ - return tensor_in.transpose() + return cast(ArrayLike, tensor_in.transpose()) From c9fc79c9b4b8f56976608161ef3fd44c3abf0237 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 16 Oct 2025 11:31:07 +0200 Subject: [PATCH 03/23] build: Remove importlib_resources dependency for Python 3.8 --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c8668a9107..afa53ffb7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,6 @@ classifiers = [ ] dependencies = [ "click>=8.0.0", # for console scripts - "importlib_resources>=1.4.0; python_version < '3.9'", # for resources in schema "jsonpatch>=1.15", "jsonschema>=4.15.0", # for utils "pyyaml>=5.1", # for parsing CLI equal-delimited options From 6b95ab79127bf92821604a7c344a33b169f0e5fa Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 16 Oct 2025 17:40:40 +0200 Subject: [PATCH 04/23] Add note about casting to ArrayLike for Python 3.10 mypy --- src/pyhf/tensor/numpy_backend.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index 5a18824460..2cc36545a2 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -656,4 +656,5 @@ def transpose(self, tensor_in: Tensor[T]) -> ArrayLike: .. versionadded:: 0.7.0 """ + # TODO: Casting needed for Python 3.10 mypy but not Python 3.13? 
         return cast(ArrayLike, tensor_in.transpose())

From 034014847c474194024545ea4a2a32431f540fe4 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 11:36:48 -0600
Subject: [PATCH 05/23] Remove Black formatting

---
 .pre-commit-config.yaml | 12 ------------
 pyproject.toml | 13 -------------
 2 files changed, 25 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ef188fe0b7..a750eec5a9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -40,18 +40,6 @@ repos:
   - id: ruff-check
     args: ["--fix", "--show-fixes"]

-- repo: https://github.com/psf/black-pre-commit-mirror
-  rev: 24.10.0
-  hooks:
-  - id: black-jupyter
-    types_or: [python, pyi, jupyter]
-
-- repo: https://github.com/adamchainz/blacken-docs
-  rev: 1.19.1
-  hooks:
-  - id: blacken-docs
-    additional_dependencies: [black==24.10.0]
-
 - repo: https://github.com/pre-commit/mirrors-mypy
   rev: v1.13.0
   # check the oldest and newest supported Pythons
diff --git a/pyproject.toml b/pyproject.toml
index afa53ffb7d..3efdb2b09b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -147,19 +147,6 @@ exclude = [

 [tool.hatch.build.targets.wheel]
 packages = ["src/pyhf"]

-[tool.black]
-line-length = 88
-skip-string-normalization = true
-include = '\.pyi?$'
-exclude = '''
-/(
-    \.git
-    | .eggs
-    | build
-    | .nox
-)/
-'''
-
 [tool.pytest.ini_options]
 minversion = "6.0"
 xfail_strict = true

From 48376e1a5317bdfe4ce2356c023026019edd2c96 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 11:37:04 -0600
Subject: [PATCH 06/23] Add isort to Ruff formatting

---
 pyproject.toml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3efdb2b09b..29e2fd1641 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -252,9 +252,10 @@ line-length = 88

 [tool.ruff.lint]
 extend-select = [
-    "UP",  # pyupgrade
-    "RUF", # Ruff-specific
-    "TID", # flake8-tidy-imports
+    "UP",   # pyupgrade
+    "RUF",  # Ruff-specific
+    "TID",  # flake8-tidy-imports
+    "I",    # isort
 ]
 ignore = [
     "E402",

From 3b8781c6e123660e556479cd6e055966c7ec0aa1 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 11:37:57 -0600
Subject: [PATCH 07/23] Apply isort in pre-commit

---
 docs/examples/notebooks/Recast.ipynb | 1 +
 docs/examples/notebooks/ShapeFactor.ipynb | 5 +++--
 docs/examples/notebooks/StatError.ipynb | 10 ++++++----
 docs/examples/notebooks/altair.ipynb | 7 ++++---
 .../binderexample/StatisticalAnalysis.ipynb | 11 +++++------
 .../notebooks/learn/TestStatistics.ipynb | 3 ++-
 .../notebooks/learn/UsingCalculators.ipynb | 1 +
 docs/examples/notebooks/multiBinPois.ipynb | 12 ++++++------
 .../multichannel-coupled-normsys.ipynb | 3 ++-
 .../notebooks/multichannel-normsys.ipynb | 3 ++-
 docs/examples/notebooks/pullplot.ipynb | 6 ++++--
 docs/examples/notebooks/toys.ipynb | 3 ++-
 docs/exts/xref.py | 1 -
 src/pyhf/__init__.py | 15 +++++----------
 src/pyhf/cli/__init__.py | 4 ++--
 src/pyhf/cli/cli.py | 5 ++---
 src/pyhf/cli/infer.py | 7 +++----
 src/pyhf/cli/patchset.py | 2 +-
 src/pyhf/cli/rootio.py | 7 ++++---
 src/pyhf/cli/spec.py | 6 ++----
 src/pyhf/constraints.py | 2 +-
 src/pyhf/contrib/cli.py | 3 ++-
 src/pyhf/infer/__init__.py | 3 +--
 src/pyhf/infer/calculators.py | 11 ++++++-----
 src/pyhf/infer/test_statistics.py | 6 +++---
 src/pyhf/infer/utils.py | 6 +++---
 src/pyhf/interpolators/__init__.py | 10 +++++-----
 src/pyhf/interpolators/code0.py | 3 ++-
 src/pyhf/interpolators/code1.py | 3 ++-
 src/pyhf/interpolators/code2.py | 3 ++-
 src/pyhf/interpolators/code4.py | 3 ++-
 src/pyhf/interpolators/code4p.py | 3 ++-
 src/pyhf/mixins.py | 2 +-
 src/pyhf/modifiers/lumi.py | 2 +-
 src/pyhf/modifiers/normfactor.py | 2 +-
 src/pyhf/modifiers/normsys.py | 3 +--
 src/pyhf/modifiers/shapefactor.py | 2 +-
 src/pyhf/optimize/common.py | 2 +-
 src/pyhf/optimize/opt_jax.py | 6 ++++--
 src/pyhf/optimize/opt_minuit.py | 5 +++--
 src/pyhf/optimize/opt_numpy.py | 3 +--
 src/pyhf/optimize/opt_scipy.py | 3 ++-
 src/pyhf/parameters/__init__.py | 6 +++---
 src/pyhf/parameters/paramview.py | 4 ++--
 src/pyhf/patchset.py | 6 +++---
 src/pyhf/pdf.py | 12 +++++-------
 src/pyhf/readxml.py | 11 ++++-------
 src/pyhf/schema/__init__.py | 3 ++-
 src/pyhf/schema/loader.py | 5 +++--
 src/pyhf/schema/validator.py | 2 +-
 src/pyhf/tensor/common.py | 2 +-
 src/pyhf/tensor/jax_backend.py | 13 +++++++------
 src/pyhf/tensor/numpy_backend.py | 5 +++--
 src/pyhf/typing.py | 2 +-
 src/pyhf/utils.py | 8 ++++----
 src/pyhf/workspace.py | 4 ++--
 src/pyhf/writexml.py | 5 ++---
 tests/benchmarks/test_benchmark.py | 5 +++--
 tests/test_backend_consistency.py | 3 ++-
 tests/test_backends.py | 3 ++-
 tests/test_cli.py | 5 +++--
 tests/test_combined_modifiers.py | 15 ++++++++-------
 tests/test_constraints.py | 7 ++++---
 tests/test_custom_mods.py | 3 ++-
 tests/test_events.py | 3 ++-
 tests/test_import.py | 14 ++++++++------
 tests/test_init.py | 4 +++-
 tests/test_interpolate.py | 3 ++-
 tests/test_jit.py | 6 ++++--
 tests/test_mixins.py | 3 ++-
 tests/test_optim.py | 17 +++++++++--------
 tests/test_paramsets.py | 3 ++-
 tests/test_patchset.py | 8 +++++---
 tests/test_pdf.py | 8 +++++---
 tests/test_probability.py | 3 ++-
 tests/test_public_api.py | 6 ++++--
 tests/test_simplemodels.py | 1 +
 tests/test_teststats.py | 4 +++-
 tests/test_toys.py | 3 ++-
 tests/test_utils.py | 1 +
 tests/test_workspace.py | 16 +++++++-------
 validation/manualonoff_roofit/onoff.py | 1 +
 .../makedata.py | 4 ++--
 .../makedata.py | 4 ++--
 validation/multichannel_histfactory/makedata.py | 4 ++--
 validation/xmlimport_input2/makedata.py | 4 ++--
 86 files changed, 244 insertions(+), 203 deletions(-)

diff --git a/docs/examples/notebooks/Recast.ipynb b/docs/examples/notebooks/Recast.ipynb
index 4655a7a473..310516874a 100644
--- a/docs/examples/notebooks/Recast.ipynb
+++ b/docs/examples/notebooks/Recast.ipynb
@@ -37,6 +37,7 @@
    ],
    "source": [
     "import jsonpatch\n",
+    "\n",
     "import pyhf\n",
     "from pyhf.contrib.viz import brazil\n",
diff --git a/docs/examples/notebooks/ShapeFactor.ipynb b/docs/examples/notebooks/ShapeFactor.ipynb
index db5bf3ac55..c387a96a3a 100644
--- a/docs/examples/notebooks/ShapeFactor.ipynb
+++ b/docs/examples/notebooks/ShapeFactor.ipynb
@@ -13,10 +13,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import logging\n",
     "import json\n",
-    "import numpy as np\n",
+    "import logging\n",
+    "\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
     "\n",
     "import pyhf\n",
     "from pyhf.contrib.viz import brazil\n",
diff --git a/docs/examples/notebooks/StatError.ipynb b/docs/examples/notebooks/StatError.ipynb
index 7cfad53f57..95f013e5c9 100644
--- a/docs/examples/notebooks/StatError.ipynb
+++ b/docs/examples/notebooks/StatError.ipynb
@@ -13,8 +13,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
-    "import importlib"
+    "import importlib\n",
+    "\n",
+    "import pyhf"
   ]
  },
 {
@@ -195,10 +196,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
     "import json\n",
     "import logging\n",
-    "from pyhf import runOnePoint, Model\n",
+    "\n",
+    "import pyhf\n",
+    "from pyhf import Model, runOnePoint\n",
     "from pyhf.simplemodels import uncorrelated_background\n",
     "\n",
     "\n",
diff --git a/docs/examples/notebooks/altair.ipynb b/docs/examples/notebooks/altair.ipynb
index 90bba60d6d..666bf4efb3 100644
--- a/docs/examples/notebooks/altair.ipynb
+++ b/docs/examples/notebooks/altair.ipynb
@@ -6,10 +6,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
-    "import pandas\n",
+    "import altair as alt\n",
     "import numpy as np\n",
-    "import altair as alt"
+    "import pandas\n",
+    "\n",
+    "import pyhf"
   ]
  },
diff --git a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
index b2c0ca5063..7eccfd456b 100644
--- a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
+++ b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
@@ -6,18 +6,17 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "import base64\n",
     "from pathlib import Path\n",
     "\n",
-    "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "from IPython.display import HTML, display\n",
+    "from ipywidgets import fixed, interact\n",
     "\n",
     "import pyhf\n",
     "import pyhf.readxml\n",
-    "from pyhf.contrib.viz import brazil\n",
-    "\n",
-    "import base64\n",
-    "from IPython.display import display, HTML\n",
-    "from ipywidgets import interact, fixed"
+    "from pyhf.contrib.viz import brazil"
   ]
  },
diff --git a/docs/examples/notebooks/learn/TestStatistics.ipynb b/docs/examples/notebooks/learn/TestStatistics.ipynb
index 5d9a30d5f2..282216bd4f 100644
--- a/docs/examples/notebooks/learn/TestStatistics.ipynb
+++ b/docs/examples/notebooks/learn/TestStatistics.ipynb
@@ -16,8 +16,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "\n",
     "import pyhf\n",
     "\n",
     "np.random.seed(0)\n",
diff --git a/docs/examples/notebooks/learn/UsingCalculators.ipynb b/docs/examples/notebooks/learn/UsingCalculators.ipynb
index 6dada1c16f..409dbbdef3 100644
--- a/docs/examples/notebooks/learn/UsingCalculators.ipynb
+++ b/docs/examples/notebooks/learn/UsingCalculators.ipynb
@@ -18,6 +18,7 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
+    "\n",
     "import pyhf\n",
     "\n",
     "np.random.seed(0)"
diff --git a/docs/examples/notebooks/multiBinPois.ipynb b/docs/examples/notebooks/multiBinPois.ipynb
index ec2a0c6b59..57bf6733c8 100644
--- a/docs/examples/notebooks/multiBinPois.ipynb
+++ b/docs/examples/notebooks/multiBinPois.ipynb
@@ -13,19 +13,19 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import logging\n",
     "import json\n",
+    "import logging\n",
     "import math\n",
-    "import numpy as np\n",
+    "\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "import scrapbook as sb\n",
+    "from scipy.interpolate import griddata\n",
     "\n",
     "import pyhf\n",
     "from pyhf import Model, optimizer\n",
-    "from pyhf.simplemodels import uncorrelated_background\n",
     "from pyhf.contrib.viz import brazil\n",
-    "\n",
-    "from scipy.interpolate import griddata\n",
-    "import scrapbook as sb"
+    "from pyhf.simplemodels import uncorrelated_background"
   ]
  },
diff --git a/docs/examples/notebooks/multichannel-coupled-normsys.ipynb b/docs/examples/notebooks/multichannel-coupled-normsys.ipynb
index 7ffbd69a85..0abcbc7e16 100644
--- a/docs/examples/notebooks/multichannel-coupled-normsys.ipynb
+++ b/docs/examples/notebooks/multichannel-coupled-normsys.ipynb
@@ -30,9 +30,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
     "import logging\n",
     "\n",
+    "import pyhf\n",
+    "\n",
     "logging.basicConfig(level=logging.INFO)\n",
     "from pyhf import Model\n",
     "\n",
diff --git a/docs/examples/notebooks/multichannel-normsys.ipynb b/docs/examples/notebooks/multichannel-normsys.ipynb
index d63144103a..df65572e49 100644
--- a/docs/examples/notebooks/multichannel-normsys.ipynb
+++ b/docs/examples/notebooks/multichannel-normsys.ipynb
@@ -30,8 +30,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
     "import logging\n",
+    "\n",
+    "import pyhf\n",
     "from pyhf import Model\n",
     "\n",
     "\n",
diff --git a/docs/examples/notebooks/pullplot.ipynb b/docs/examples/notebooks/pullplot.ipynb
index a259ba2cc9..ad2a1ff924 100644
--- a/docs/examples/notebooks/pullplot.ipynb
+++ b/docs/examples/notebooks/pullplot.ipynb
@@ -6,10 +6,12 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import pyhf\n",
     "import json\n",
-    "import numpy as np\n",
+    "\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "\n",
+    "import pyhf\n",
     "\n",
     "%matplotlib inline"
   ]
diff --git a/docs/examples/notebooks/toys.ipynb b/docs/examples/notebooks/toys.ipynb
index 2d826d0f3b..78d527431d 100644
--- a/docs/examples/notebooks/toys.ipynb
+++ b/docs/examples/notebooks/toys.ipynb
@@ -6,8 +6,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "\n",
     "import pyhf"
   ]
 },
diff --git a/docs/exts/xref.py b/docs/exts/xref.py
index 755ece671b..d61cb9e8f0 100644
--- a/docs/exts/xref.py
+++ b/docs/exts/xref.py
@@ -1,5 +1,4 @@
 from docutils import nodes
-
 from sphinx.util import caption_ref_re
diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py
index cb3c0405e4..68eb6db76e 100644
--- a/src/pyhf/__init__.py
+++ b/src/pyhf/__init__.py
@@ -1,16 +1,11 @@
-from pyhf.tensor import BackendRetriever as tensor
-from pyhf.optimize import OptimizerRetriever as optimize  # noqa
-from pyhf.tensor.manager import get_backend
-from pyhf.tensor.manager import set_backend
+from pyhf import compat, infer, schema, simplemodels
 from pyhf._version import version as __version__
-
+from pyhf.optimize import OptimizerRetriever as optimize  # noqa
+from pyhf.patchset import PatchSet
 from pyhf.pdf import Model
+from pyhf.tensor import BackendRetriever as tensor
+from pyhf.tensor.manager import get_backend, set_backend
 from pyhf.workspace import Workspace
-from pyhf import schema
-from pyhf import simplemodels
-from pyhf import infer
-from pyhf import compat
-from pyhf.patchset import PatchSet

 __all__ = [
     "Model",
diff --git a/src/pyhf/cli/__init__.py b/src/pyhf/cli/__init__.py
index 988998d39a..f11dae7cc5 100644
--- a/src/pyhf/cli/__init__.py
+++ b/src/pyhf/cli/__init__.py
@@ -1,10 +1,10 @@
 """The pyhf command line interface."""

 from pyhf.cli.cli import pyhf as cli
+from pyhf.cli.complete import cli as complete
+from pyhf.cli.infer import cli as infer
 from pyhf.cli.rootio import cli as rootio
 from pyhf.cli.spec import cli as spec
-from pyhf.cli.infer import cli as infer
-from pyhf.cli.complete import cli as complete
 from pyhf.contrib import cli as contrib

 __all__ = ['cli', 'complete', 'contrib', 'infer', 'rootio', 'spec']
diff --git a/src/pyhf/cli/cli.py b/src/pyhf/cli/cli.py
index cd458a6329..d4fb8c779d 100644
--- a/src/pyhf/cli/cli.py
+++ b/src/pyhf/cli/cli.py
@@ -4,10 +4,9 @@

 import click

-from pyhf import __version__
-from pyhf.cli import rootio, spec, infer, patchset, complete
+from pyhf import __version__, utils
+from pyhf.cli import complete, infer, patchset, rootio, spec
 from pyhf.contrib import cli as contrib
-from pyhf import utils

 logging.basicConfig()
 log = logging.getLogger(__name__)
diff --git a/src/pyhf/cli/infer.py b/src/pyhf/cli/infer.py
index 9722c52d16..b3acb5ba8d 100644
--- a/src/pyhf/cli/infer.py
+++ b/src/pyhf/cli/infer.py
@@ -1,15 +1,14 @@
 """The inference CLI group."""

+import json
 import logging

 import click
-import json

+from pyhf import get_backend, optimize, set_backend
+from pyhf.infer import hypotest, mle
 from pyhf.utils import EqDelimStringParamType
-from pyhf.infer import hypotest
-from pyhf.infer import mle
 from pyhf.workspace import Workspace
-from pyhf import get_backend, set_backend, optimize

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py
index 4d31d4963d..5ba1deb4da 100644
--- a/src/pyhf/cli/patchset.py
+++ b/src/pyhf/cli/patchset.py
@@ -1,9 +1,9 @@
 """The pyhf spec CLI subcommand."""

+import json
 import logging

 import click
-import json

 from pyhf.patchset import PatchSet
 from pyhf.workspace import Workspace
diff --git a/src/pyhf/cli/rootio.py b/src/pyhf/cli/rootio.py
index c5d5840762..90d5c427eb 100644
--- a/src/pyhf/cli/rootio.py
+++ b/src/pyhf/cli/rootio.py
@@ -1,12 +1,13 @@
 """CLI subapps to handle conversion from ROOT."""

-import logging
-
-import click
 import json
+import logging
 import os
 from pathlib import Path
+
+import click
 import jsonpatch
+
 from pyhf.utils import VolumeMountPath

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/cli/spec.py b/src/pyhf/cli/spec.py
index 08fdd73d94..7cc0430b00 100644
--- a/src/pyhf/cli/spec.py
+++ b/src/pyhf/cli/spec.py
@@ -1,14 +1,12 @@
 """The pyhf spec CLI subcommand."""

+import json
 import logging

 import click
-import json

+from pyhf import modifiers, parameters, utils
 from pyhf.workspace import Workspace
-from pyhf import modifiers
-from pyhf import parameters
-from pyhf import utils

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py
index 30ef835e79..acd1b35af3 100644
--- a/src/pyhf/constraints.py
+++ b/src/pyhf/constraints.py
@@ -1,8 +1,8 @@
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf import probability as prob
 from pyhf.parameters import ParamViewer
+from pyhf.tensor.manager import get_backend

 __all__ = ["gaussian_constraint_combined", "poisson_constraint_combined"]
diff --git a/src/pyhf/contrib/cli.py b/src/pyhf/contrib/cli.py
index 5bba47e4d4..8a9478ce9b 100644
--- a/src/pyhf/contrib/cli.py
+++ b/src/pyhf/contrib/cli.py
@@ -1,9 +1,10 @@
 """CLI for functionality that will get migrated out eventually."""

 import logging
-import click
 from pathlib import Path

+import click
+
 logging.basicConfig()
 log = logging.getLogger(__name__)
diff --git a/src/pyhf/infer/__init__.py b/src/pyhf/infer/__init__.py
index 058a7460d6..3f43ae34fe 100644
--- a/src/pyhf/infer/__init__.py
+++ b/src/pyhf/infer/__init__.py
@@ -1,8 +1,7 @@
 """Inference for Statistical Models."""

+from pyhf import exceptions, get_backend
 from pyhf.infer import utils
-from pyhf import get_backend
-from pyhf import exceptions


 def _check_hypotest_prerequisites(pdf, data, init_pars, par_bounds, fixed_params):
diff --git a/src/pyhf/infer/calculators.py b/src/pyhf/infer/calculators.py
index 12bb6d5bce..ff5f59026a 100644
--- a/src/pyhf/infer/calculators.py
+++ b/src/pyhf/infer/calculators.py
@@ -8,13 +8,14 @@
 Using the calculators hypothesis tests can then be performed.
 """

-from pyhf.infer.mle import fixed_poi_fit
-from pyhf import get_backend
-from pyhf.infer import utils
+import logging
+from dataclasses import dataclass
+
 import tqdm

-from dataclasses import dataclass
-import logging
+from pyhf import get_backend
+from pyhf.infer import utils
+from pyhf.infer.mle import fixed_poi_fit

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/infer/test_statistics.py b/src/pyhf/infer/test_statistics.py
index 97b6babe79..f9940c678e 100644
--- a/src/pyhf/infer/test_statistics.py
+++ b/src/pyhf/infer/test_statistics.py
@@ -1,8 +1,8 @@
+import logging
+
 from pyhf import get_backend
-from pyhf.infer.mle import fixed_poi_fit, fit
 from pyhf.exceptions import UnspecifiedPOI
-
-import logging
+from pyhf.infer.mle import fit, fixed_poi_fit

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/infer/utils.py b/src/pyhf/infer/utils.py
index a9eeefac49..27437b110c 100644
--- a/src/pyhf/infer/utils.py
+++ b/src/pyhf/infer/utils.py
@@ -1,11 +1,11 @@
 """Inference for Statistical Models."""

-from pyhf.infer.calculators import AsymptoticCalculator, ToyCalculator
+import logging
+
 from pyhf.exceptions import InvalidTestStatistic
+from pyhf.infer.calculators import AsymptoticCalculator, ToyCalculator
 from pyhf.infer.test_statistics import q0, qmu, qmu_tilde

-import logging
-
 log = logging.getLogger(__name__)

 __all__ = ["create_calculator", "get_test_stat"]
diff --git a/src/pyhf/interpolators/__init__.py b/src/pyhf/interpolators/__init__.py
index b04093d1e6..5c451fec3a 100644
--- a/src/pyhf/interpolators/__init__.py
+++ b/src/pyhf/interpolators/__init__.py
@@ -19,12 +19,12 @@ def _slow_interpolator_looper(histogramssets, alphasets, func):
 # interpolation codes come from https://cds.cern.ch/record/1456844/files/CERN-OPEN-2012-016.pdf
-from pyhf.interpolators.code0 import code0, _slow_code0
-from pyhf.interpolators.code1 import code1, _slow_code1
-from pyhf.interpolators.code2 import code2, _slow_code2
-from pyhf.interpolators.code4 import code4, _slow_code4
-from pyhf.interpolators.code4p import code4p, _slow_code4p
 from pyhf import exceptions
+from pyhf.interpolators.code0 import _slow_code0, code0
+from pyhf.interpolators.code1 import _slow_code1, code1
+from pyhf.interpolators.code2 import _slow_code2, code2
+from pyhf.interpolators.code4 import _slow_code4, code4
+from pyhf.interpolators.code4p import _slow_code4p, code4p


 def get(interpcode, do_tensorized_calc=True):
diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py
index 52d87fefba..554dec670e 100644
--- a/src/pyhf/interpolators/code0.py
+++ b/src/pyhf/interpolators/code0.py
@@ -1,10 +1,11 @@
 """Piecewise-linear Interpolation. (Code 0)."""

 import logging
+
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf.interpolators import _slow_interpolator_looper
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py
index a9fa41c011..9c70675b73 100644
--- a/src/pyhf/interpolators/code1.py
+++ b/src/pyhf/interpolators/code1.py
@@ -2,10 +2,11 @@

 import logging
 import math
+
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf.interpolators import _slow_interpolator_looper
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py
index 8dff3278e7..e4cb073088 100644
--- a/src/pyhf/interpolators/code2.py
+++ b/src/pyhf/interpolators/code2.py
@@ -1,10 +1,11 @@
 """Quadratic Interpolation (Code 2)."""

 import logging
+
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf.interpolators import _slow_interpolator_looper
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py
index 0290d0c52d..5d382133e5 100644
--- a/src/pyhf/interpolators/code4.py
+++ b/src/pyhf/interpolators/code4.py
@@ -2,10 +2,11 @@

 import logging
 import math
+
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf.interpolators import _slow_interpolator_looper
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py
index 8841eb0dd0..a3d292845e 100644
--- a/src/pyhf/interpolators/code4p.py
+++ b/src/pyhf/interpolators/code4p.py
@@ -1,10 +1,11 @@
 """Piecewise-Linear + Polynomial Interpolation (Code 4p)."""

 import logging
+
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
 from pyhf.interpolators import _slow_interpolator_looper
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/mixins.py b/src/pyhf/mixins.py
index 4855e95101..07667ade30 100644
--- a/src/pyhf/mixins.py
+++ b/src/pyhf/mixins.py
@@ -1,8 +1,8 @@
 from __future__ import annotations

 import logging
-from typing import Any
 from collections.abc import Sequence
+from typing import Any

 from pyhf.typing import Channel
diff --git a/src/pyhf/modifiers/lumi.py b/src/pyhf/modifiers/lumi.py
index f8697b7bee..854534d7f9 100644
--- a/src/pyhf/modifiers/lumi.py
+++ b/src/pyhf/modifiers/lumi.py
@@ -1,6 +1,6 @@
 import logging

-from pyhf import get_backend, events
+from pyhf import events, get_backend
 from pyhf.parameters import ParamViewer

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/modifiers/normfactor.py b/src/pyhf/modifiers/normfactor.py
index fb723664f0..6d8cbd5852 100644
--- a/src/pyhf/modifiers/normfactor.py
+++ b/src/pyhf/modifiers/normfactor.py
@@ -1,6 +1,6 @@
 import logging

-from pyhf import get_backend, events
+from pyhf import events, get_backend
 from pyhf.parameters import ParamViewer

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/modifiers/normsys.py b/src/pyhf/modifiers/normsys.py
index e536bfe7a4..8360b498e9 100644
--- a/src/pyhf/modifiers/normsys.py
+++ b/src/pyhf/modifiers/normsys.py
@@ -1,7 +1,6 @@
 import logging

-from pyhf import get_backend, events
-from pyhf import interpolators
+from pyhf import events, get_backend, interpolators
 from pyhf.parameters import ParamViewer

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py
index 64b0db2a48..e4c91c968a 100644
--- a/src/pyhf/modifiers/shapefactor.py
+++ b/src/pyhf/modifiers/shapefactor.py
@@ -2,8 +2,8 @@

 import pyhf
 from pyhf import events
-from pyhf.tensor.manager import get_backend
 from pyhf.parameters import ParamViewer
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/optimize/common.py b/src/pyhf/optimize/common.py
index 36692d1a80..24483fe82f 100644
--- a/src/pyhf/optimize/common.py
+++ b/src/pyhf/optimize/common.py
@@ -1,7 +1,7 @@
 """Common Backend Shim to prepare minimization for optimizer."""

-from pyhf.tensor.manager import get_backend
 from pyhf.tensor.common import _TensorViewer
+from pyhf.tensor.manager import get_backend


 def _make_stitch_pars(tv=None, fixed_values=None):
diff --git a/src/pyhf/optimize/opt_jax.py b/src/pyhf/optimize/opt_jax.py
index 5567678844..119e6982ba 100644
--- a/src/pyhf/optimize/opt_jax.py
+++ b/src/pyhf/optimize/opt_jax.py
@@ -1,9 +1,11 @@
 """JAX Backend Function Shim."""

+import logging
+
+import jax
+
 from pyhf import get_backend
 from pyhf.tensor.common import _TensorViewer
-import jax
-import logging

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index 24c30decc7..8ba7d094c5 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -1,9 +1,10 @@
 """Minuit Optimizer Class."""

+import iminuit
+import scipy
+
 from pyhf import exceptions
 from pyhf.optimize.mixins import OptimizerMixin
-import scipy
-import iminuit


 class minuit_optimizer(OptimizerMixin):
diff --git a/src/pyhf/optimize/opt_numpy.py b/src/pyhf/optimize/opt_numpy.py
index 8f3d4178cf..44713933c7 100644
--- a/src/pyhf/optimize/opt_numpy.py
+++ b/src/pyhf/optimize/opt_numpy.py
@@ -1,7 +1,6 @@
 """Numpy Backend Function Shim."""

-from pyhf import get_backend
-from pyhf import exceptions
+from pyhf import exceptions, get_backend


 def wrap_objective(objective, data, pdf, stitch_pars, do_grad=False, jit_pieces=None):
diff --git a/src/pyhf/optimize/opt_scipy.py b/src/pyhf/optimize/opt_scipy.py
index 73b4b6bae2..6b8de59cc8 100644
--- a/src/pyhf/optimize/opt_scipy.py
+++ b/src/pyhf/optimize/opt_scipy.py
@@ -1,8 +1,9 @@
 """SciPy Optimizer Class."""

+import scipy
+
 from pyhf import exceptions
 from pyhf.optimize.mixins import OptimizerMixin
-import scipy


 class scipy_optimizer(OptimizerMixin):
diff --git a/src/pyhf/parameters/__init__.py b/src/pyhf/parameters/__init__.py
index 515e2bd48f..1dc89f22b7 100644
--- a/src/pyhf/parameters/__init__.py
+++ b/src/pyhf/parameters/__init__.py
@@ -1,11 +1,11 @@
 from pyhf.parameters.paramsets import (
-    paramset,
-    unconstrained,
     constrained_by_normal,
     constrained_by_poisson,
+    paramset,
+    unconstrained,
 )
-from pyhf.parameters.utils import reduce_paramsets_requirements
 from pyhf.parameters.paramview import ParamViewer
+from pyhf.parameters.utils import reduce_paramsets_requirements

 __all__ = [
     'ParamViewer',
diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py
index 0238832747..170c2ef9ab 100644
--- a/src/pyhf/parameters/paramview.py
+++ b/src/pyhf/parameters/paramview.py
@@ -1,10 +1,10 @@
 import pyhf
 from pyhf import events
-from pyhf.tensor.manager import get_backend
 from pyhf.tensor.common import (
-    _tensorviewer_from_slices,
     _tensorviewer_from_sizes,
+    _tensorviewer_from_slices,
 )
+from pyhf.tensor.manager import get_backend

 __all__ = ["ParamViewer"]
diff --git a/src/pyhf/patchset.py b/src/pyhf/patchset.py
index 86acaef693..be0a92d673 100644
--- a/src/pyhf/patchset.py
+++ b/src/pyhf/patchset.py
@@ -3,10 +3,10 @@
 """

 import logging
+
 import jsonpatch
-from pyhf import exceptions
-from pyhf import utils
-from pyhf import schema
+
+from pyhf import exceptions, schema, utils
 from pyhf.workspace import Workspace

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py
index b8cfe612bc..5cc98ad2c6 100644
--- a/src/pyhf/pdf.py
+++ b/src/pyhf/pdf.py
@@ -4,18 +4,16 @@
 import logging
 from typing import Union

-import pyhf.parameters
 import pyhf
-from pyhf.tensor.manager import get_backend
-from pyhf import exceptions
-from pyhf import schema
-from pyhf import events
+import pyhf.parameters
+from pyhf import events, exceptions, schema
 from pyhf import probability as prob
 from pyhf.constraints import gaussian_constraint_combined, poisson_constraint_combined
-from pyhf.parameters import reduce_paramsets_requirements, ParamViewer
-from pyhf.tensor.common import _TensorViewer, _tensorviewer_from_sizes
 from pyhf.mixins import _ChannelSummaryMixin
 from pyhf.modifiers import histfactory_set
+from pyhf.parameters import ParamViewer, reduce_paramsets_requirements
+from pyhf.tensor.common import _TensorViewer, _tensorviewer_from_sizes
+from pyhf.tensor.manager import get_backend

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py
index 52612ae082..8356b00634 100644
--- a/src/pyhf/readxml.py
+++ b/src/pyhf/readxml.py
@@ -1,24 +1,21 @@
 from __future__ import annotations

 import logging
+import xml.etree.ElementTree as ET
+from collections.abc import Iterable, MutableMapping, MutableSequence, Sequence
+from pathlib import Path
 from typing import (
     IO,
     Callable,
     Union,
     cast,
 )
-from collections.abc import Iterable, MutableMapping, MutableSequence, Sequence
-
-import xml.etree.ElementTree as ET
-from pathlib import Path

 import numpy as np
 import tqdm
 import uproot

-from pyhf import compat
-from pyhf import exceptions
-from pyhf import schema
+from pyhf import compat, exceptions, schema
 from pyhf.typing import (
     Channel,
     HistoSys,
diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py
index a57c7d82f4..085cb4e7b9 100644
--- a/src/pyhf/schema/__init__.py
+++ b/src/pyhf/schema/__init__.py
@@ -4,9 +4,10 @@

 import pathlib
 import sys
+
+from pyhf.schema import variables
 from pyhf.schema.loader import load_schema
 from pyhf.schema.validator import validate
-from pyhf.schema import variables

 __all__ = [
     "load_schema",
diff --git a/src/pyhf/schema/loader.py b/src/pyhf/schema/loader.py
index 0f0001faef..fd293b2559 100644
--- a/src/pyhf/schema/loader.py
+++ b/src/pyhf/schema/loader.py
@@ -1,8 +1,9 @@
-from pathlib import Path
 import json
+from importlib import resources
+from pathlib import Path
+
 import pyhf.exceptions
 from pyhf.schema import variables
-from importlib import resources


 def load_schema(schema_id: str):
diff --git a/src/pyhf/schema/validator.py b/src/pyhf/schema/validator.py
index 69230d7410..f23eef3859 100644
--- a/src/pyhf/schema/validator.py
+++ b/src/pyhf/schema/validator.py
@@ -1,7 +1,7 @@
 import numbers
+from collections.abc import Mapping
 from pathlib import Path
 from typing import Union
-from collections.abc import Mapping

 import jsonschema
diff --git a/src/pyhf/tensor/common.py b/src/pyhf/tensor/common.py
index cc5de609ad..5b6623c233 100644
--- a/src/pyhf/tensor/common.py
+++ b/src/pyhf/tensor/common.py
@@ -1,6 +1,6 @@
 import pyhf
-from pyhf.tensor.manager import get_backend
 from pyhf import events
+from pyhf.tensor.manager import get_backend


 class _TensorViewer:
diff --git a/src/pyhf/tensor/jax_backend.py b/src/pyhf/tensor/jax_backend.py
index e616302446..25685bb550 100644
--- a/src/pyhf/tensor/jax_backend.py
+++ b/src/pyhf/tensor/jax_backend.py
@@ -2,15 +2,16 @@

 config.update('jax_enable_x64', True)

-from jax.core import Tracer
-from jax import Array
+import logging
+
 import jax.numpy as jnp
-from jax.scipy.special import gammaln, xlogy
-from jax.scipy import special
-from jax.scipy.stats import norm
 import numpy as np
 import scipy.stats as osp_stats
-import logging
+from jax import Array
+from jax.core import Tracer
+from jax.scipy import special
+from jax.scipy.special import gammaln, xlogy
+from jax.scipy.stats import norm

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py
index 2cc36545a2..0e437d2b75 100644
--- a/src/pyhf/tensor/numpy_backend.py
+++ b/src/pyhf/tensor/numpy_backend.py
@@ -3,8 +3,8 @@
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union
 from collections.abc import Mapping, Sequence
+from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union

 import numpy as np

@@ -15,12 +15,13 @@
     ArrayLike = "ArrayLike"
     NBitBase = "NBitBase"

+from typing import cast
+
 from scipy import special
 from scipy.special import gammaln, xlogy
 from scipy.stats import norm, poisson

 from pyhf.typing import Literal, Shape
-from typing import cast

 T = TypeVar("T", bound=NBitBase)
diff --git a/src/pyhf/typing.py b/src/pyhf/typing.py
index f3e4e5784c..1a722d404b 100644
--- a/src/pyhf/typing.py
+++ b/src/pyhf/typing.py
@@ -1,4 +1,5 @@
 import os
+from collections.abc import MutableSequence, Sequence
 from typing import (
     Any,
     Literal,
@@ -7,7 +8,6 @@
     TypedDict,
     Union,
 )
-from collections.abc import MutableSequence, Sequence

 __all__ = (
     "Channel",
diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py
index ea2fdfa99a..fbf231b70b 100644
--- a/src/pyhf/utils.py
+++ b/src/pyhf/utils.py
@@ -1,11 +1,11 @@
-import json
-import yaml
-import click
 import hashlib
+import json
 from gettext import gettext
-
 from importlib import resources

+import click
+import yaml
+
 __all__ = [
     "EqDelimStringParamType",
     "citation",
diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py
index 08b285d25e..215b38cb42 100644
--- a/src/pyhf/workspace.py
+++ b/src/pyhf/workspace.py
@@ -10,7 +10,9 @@

 import collections
 import copy
+import functools
 import logging
+import operator
 from typing import ClassVar

 import jsonpatch
@@ -18,8 +20,6 @@
 from pyhf import exceptions, schema
 from pyhf.mixins import _ChannelSummaryMixin
 from pyhf.pdf import Model
-import functools
-import operator

 log = logging.getLogger(__name__)
diff --git a/src/pyhf/writexml.py b/src/pyhf/writexml.py
index ff7aac800b..b419c8e394 100644
--- a/src/pyhf/writexml.py
+++ b/src/pyhf/writexml.py
@@ -1,10 +1,9 @@
 import logging
-
-from pathlib import Path
 import shutil
 import xml.etree.ElementTree as ET
+from pathlib import Path

-import numpy as np
+import numpy as np
 import uproot

 from pyhf.mixins import _ChannelSummaryMixin
diff --git a/tests/benchmarks/test_benchmark.py b/tests/benchmarks/test_benchmark.py
index 35fbf0c1ec..31399ec6a6 100644
--- a/tests/benchmarks/test_benchmark.py
+++ b/tests/benchmarks/test_benchmark.py
@@ -1,8 +1,9 @@
-import pyhf
-from pyhf.simplemodels import uncorrelated_background
 import numpy as np
 import pytest

+import pyhf
+from pyhf.simplemodels import uncorrelated_background
+

 def generate_source_static(n_bins):
     """
diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py
index b2bfe5c2ae..586dbd002c 100644
--- a/tests/test_backend_consistency.py
+++ b/tests/test_backend_consistency.py
@@ -1,7 +1,8 @@
-import pyhf
 import numpy as np
 import pytest

+import pyhf
+

 def generate_source_static(n_bins):
     """
diff --git a/tests/test_backends.py b/tests/test_backends.py
index dbde580cc1..5c205868e7 100644
--- a/tests/test_backends.py
+++ b/tests/test_backends.py
@@ -1,7 +1,8 @@
-import pyhf
 import jax
 import pytest

+import pyhf
+

 def test_default_backend():
     pyhf.set_backend("jax", default=True)
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 8b2bda812c..2bfd57d882 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,6 +1,7 @@
-from click.testing import CliRunner
-import sys
 import importlib
+import sys
+
+from click.testing import CliRunner


 def test_shllcomplete_cli(isolate_modules):
diff --git a/tests/test_combined_modifiers.py b/tests/test_combined_modifiers.py
index 4705847ae8..77b6d65ed2 100644
--- a/tests/test_combined_modifiers.py
+++ b/tests/test_combined_modifiers.py
@@ -1,18 +1,19 @@
+import numpy as np
+
+import pyhf
 from pyhf.modifiers.histosys import histosys_combined
-from pyhf.modifiers.normsys import normsys_combined
 from pyhf.modifiers.lumi import lumi_combined
-from pyhf.modifiers.staterror import staterror_combined
-from pyhf.modifiers.shapesys import shapesys_combined
 from pyhf.modifiers.normfactor import normfactor_combined
+from pyhf.modifiers.normsys import normsys_combined
 from pyhf.modifiers.shapefactor import shapefactor_combined
+from pyhf.modifiers.shapesys import shapesys_combined
+from pyhf.modifiers.staterror import staterror_combined
 from pyhf.parameters import (
-    paramset,
-    unconstrained,
     constrained_by_normal,
     constrained_by_poisson,
+    paramset,
+    unconstrained,
 )
-import numpy as np
-import pyhf


 class MockConfig:
diff --git a/tests/test_constraints.py b/tests/test_constraints.py
index f097ad92eb..676ff7ada8 100644
--- a/tests/test_constraints.py
+++ b/tests/test_constraints.py
@@ -1,9 +1,10 @@
+import numpy as np
 import pytest
+
 import pyhf
-from pyhf.parameters import constrained_by_poisson, constrained_by_normal
-from pyhf.constraints import gaussian_constraint_combined, poisson_constraint_combined
 from pyhf import default_backend
-import numpy as np
+from pyhf.constraints import gaussian_constraint_combined, poisson_constraint_combined
+from pyhf.parameters import constrained_by_normal, constrained_by_poisson


 class MockConfig:
diff --git a/tests/test_custom_mods.py b/tests/test_custom_mods.py
index ca83b0f002..a4cc09e16c 100644
--- a/tests/test_custom_mods.py
+++ b/tests/test_custom_mods.py
@@ -1,6 +1,7 @@
+import pytest
+
 import pyhf
 import pyhf.exceptions as exceptions
-import pytest


 class custom_builder:
diff --git a/tests/test_events.py b/tests/test_events.py
index 2464eef18c..cd70da9d6a 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -1,6 +1,7 @@
-import pyhf.events as events
 from unittest import mock

+import pyhf.events as events
+

 def test_subscribe_event():
     ename = 'test'
diff --git a/tests/test_import.py b/tests/test_import.py
index ad473e144a..8204d79ab5 100644
--- a/tests/test_import.py
+++ b/tests/test_import.py
@@ -1,13 +1,15 @@
-import pyhf
-import pyhf.readxml
-import numpy as np
-import uproot
+import logging
+import xml.etree.ElementTree as ET
 from pathlib import Path
+
+import numpy as np
 import pytest
-import xml.etree.ElementTree as ET -import logging +import uproot from jsonschema import ValidationError +import pyhf +import pyhf.readxml + def assert_equal_dictionary(d1, d2): "recursively compare 2 dictionaries" diff --git a/tests/test_init.py b/tests/test_init.py index 495b3aeab1..13472e973a 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -1,5 +1,7 @@ -import pytest import sys + +import pytest + import pyhf diff --git a/tests/test_interpolate.py b/tests/test_interpolate.py index d544aee37f..3a4dba71c3 100644 --- a/tests/test_interpolate.py +++ b/tests/test_interpolate.py @@ -1,7 +1,8 @@ -import pyhf import numpy as np import pytest +import pyhf + @pytest.fixture def random_histosets_alphasets_pair(): diff --git a/tests/test_jit.py b/tests/test_jit.py index b3dc665607..bc1cf2f9ce 100644 --- a/tests/test_jit.py +++ b/tests/test_jit.py @@ -1,7 +1,9 @@ -import pyhf -import pytest import logging +import pytest + +import pyhf + @pytest.mark.parametrize( 'return_fitted_val', [False, True], ids=['no_fitval', 'do_fitval'] diff --git a/tests/test_mixins.py b/tests/test_mixins.py index fb3e78bc2d..99e304af8a 100644 --- a/tests/test_mixins.py +++ b/tests/test_mixins.py @@ -1,6 +1,7 @@ +import pytest + import pyhf import pyhf.readxml -import pytest @pytest.fixture( diff --git a/tests/test_optim.py b/tests/test_optim.py index 1caf26862d..f325554eb4 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -1,14 +1,15 @@ -from unittest.mock import patch, PropertyMock +import itertools +from unittest.mock import PropertyMock, patch + +import iminuit +import numpy as np +import pytest +from scipy.optimize import OptimizeResult, OptimizeWarning, minimize + import pyhf -from pyhf.optimize.mixins import OptimizerMixin from pyhf.optimize.common import _get_tensor_shim, _make_stitch_pars +from pyhf.optimize.mixins import OptimizerMixin from pyhf.tensor.common import _TensorViewer -import pytest -from scipy.optimize import minimize, OptimizeResult -from scipy.optimize import OptimizeWarning -import iminuit -import itertools -import numpy as np # from https://docs.scipy.org/doc/scipy/tutorial/optimize.html#nelder-mead-simplex-algorithm-method-nelder-mead diff --git a/tests/test_paramsets.py b/tests/test_paramsets.py index 753baa1cbe..6f450de690 100644 --- a/tests/test_paramsets.py +++ b/tests/test_paramsets.py @@ -1,6 +1,7 @@ -from pyhf.parameters import paramsets import pytest +from pyhf.parameters import paramsets + def test_paramset_unconstrained(): pset = paramsets.unconstrained( diff --git a/tests/test_patchset.py b/tests/test_patchset.py index e6c5a7edab..7adaf1d2b1 100644 --- a/tests/test_patchset.py +++ b/tests/test_patchset.py @@ -1,9 +1,11 @@ -import pyhf +import json +from unittest import mock + import pytest + +import pyhf import pyhf.exceptions import pyhf.patchset -import json -from unittest import mock @pytest.fixture( diff --git a/tests/test_pdf.py b/tests/test_pdf.py index f12e1448ab..805ec05600 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -1,8 +1,10 @@ -import pyhf +import json + +import numpy as np import pytest + +import pyhf import pyhf.exceptions -import numpy as np -import json def test_minimum_model_spec(): diff --git a/tests/test_probability.py b/tests/test_probability.py index 82bf0b9f26..657a39ea87 100644 --- a/tests/test_probability.py +++ b/tests/test_probability.py @@ -1,6 +1,7 @@ -from pyhf import probability import numpy as np +from pyhf import probability + def test_poisson(backend): tb, _ = backend diff --git a/tests/test_public_api.py 
b/tests/test_public_api.py index d788a3379a..5426ecddef 100644 --- a/tests/test_public_api.py +++ b/tests/test_public_api.py @@ -1,7 +1,9 @@ +import pathlib + +import numpy as np import pytest + import pyhf -import numpy as np -import pathlib @pytest.fixture(scope='function') diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index e7201bf460..2738e3fb95 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -1,4 +1,5 @@ import pytest + import pyhf diff --git a/tests/test_teststats.py b/tests/test_teststats.py index 5d66278012..b9d9b7d001 100644 --- a/tests/test_teststats.py +++ b/tests/test_teststats.py @@ -1,7 +1,9 @@ +import logging + import pytest + import pyhf import pyhf.infer.test_statistics -import logging def test_q0(caplog): diff --git a/tests/test_toys.py b/tests/test_toys.py index 4f338937f9..eddd39f479 100644 --- a/tests/test_toys.py +++ b/tests/test_toys.py @@ -1,6 +1,7 @@ -import pyhf import numpy as np +import pyhf + def test_smoketest_toys(backend): tb, _ = backend diff --git a/tests/test_utils.py b/tests/test_utils.py index 2fb6fb8a4e..a7800a742d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,5 @@ import pytest + import pyhf diff --git a/tests/test_workspace.py b/tests/test_workspace.py index e966d6c3f1..3a4cdb684e 100644 --- a/tests/test_workspace.py +++ b/tests/test_workspace.py @@ -1,13 +1,15 @@ -import pyhf -import pyhf.readxml -import pytest -import pyhf.exceptions +import copy import json import logging -import pyhf.workspace -import pyhf.utils + +import pytest + +import pyhf +import pyhf.exceptions +import pyhf.readxml import pyhf.schema -import copy +import pyhf.utils +import pyhf.workspace @pytest.fixture( diff --git a/validation/manualonoff_roofit/onoff.py b/validation/manualonoff_roofit/onoff.py index ff31acacbe..e30a665d34 100644 --- a/validation/manualonoff_roofit/onoff.py +++ b/validation/manualonoff_roofit/onoff.py @@ -1,4 +1,5 @@ import json + import ROOT with open("data/source.json", encoding="utf-8") as source_file: diff --git a/validation/multichan_coupledhistosys_histfactory/makedata.py b/validation/multichan_coupledhistosys_histfactory/makedata.py index b09298329f..fdabc01721 100644 --- a/validation/multichan_coupledhistosys_histfactory/makedata.py +++ b/validation/multichan_coupledhistosys_histfactory/makedata.py @@ -1,8 +1,8 @@ -import ROOT - import json import sys +import ROOT + with open(sys.argv[1], encoding="utf-8") as source_file: source_data = json.load(source_file) root_file = sys.argv[2] diff --git a/validation/multichan_coupledoverall_histfactory/makedata.py b/validation/multichan_coupledoverall_histfactory/makedata.py index 0b33ff4f55..980b574ddf 100644 --- a/validation/multichan_coupledoverall_histfactory/makedata.py +++ b/validation/multichan_coupledoverall_histfactory/makedata.py @@ -1,8 +1,8 @@ -import ROOT - import json import sys +import ROOT + with open(sys.argv[1], encoding="utf-8") as source_file: source_data = json.load(source_file) root_file = sys.argv[2] diff --git a/validation/multichannel_histfactory/makedata.py b/validation/multichannel_histfactory/makedata.py index cce668912d..b24bfc7228 100644 --- a/validation/multichannel_histfactory/makedata.py +++ b/validation/multichannel_histfactory/makedata.py @@ -1,8 +1,8 @@ -import ROOT - import json import sys +import ROOT + with open(sys.argv[1], encoding="utf-8") as source_file: source_data = json.load(source_file) root_file = sys.argv[2] diff --git a/validation/xmlimport_input2/makedata.py 
b/validation/xmlimport_input2/makedata.py
index b09298329f..fdabc01721 100644
--- a/validation/xmlimport_input2/makedata.py
+++ b/validation/xmlimport_input2/makedata.py
@@ -1,8 +1,8 @@
-import ROOT
-
 import json
 import sys

+import ROOT
+
 with open(sys.argv[1], encoding="utf-8") as source_file:
     source_data = json.load(source_file)
 root_file = sys.argv[2]

From 9bde63635e1e9dd14bc77ef52e2fe635cd1200c7 Mon Sep 17 00:00:00 2001
From: Matthew Feickert <matthew.feickert@cern.ch>
Date: Thu, 23 Oct 2025 11:45:15 -0600
Subject: [PATCH 08/23] docs: Update badge to show Ruff as code style

---
 README.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.rst b/README.rst
index 023706d716..fabc6731de 100644
--- a/README.rst
+++ b/README.rst
@@ -12,7 +12,7 @@ pure-python fitting/limit-setting/interval estimation HistFactory-style

 |PyPI version| |Conda-forge version| |Supported Python versions| |Docker Hub pyhf| |Docker Hub pyhf CUDA|
-|Code Coverage| |CodeFactor| |pre-commit.ci Status| |Code style: black|
+|Code Coverage| |CodeFactor| |pre-commit.ci Status| |Code style: ruff|
 |GitHub Actions Status: CI| |GitHub Actions Status: Docs|
 |GitHub Actions Status: Publish| |GitHub Actions Status: Docker|

@@ -392,8 +392,8 @@ and grant `OAC-1450377 <https://www.nsf.gov/awardsearch/showAward?AWD_ID=1450377>`_
-.. |Code style: black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
+.. |Code style: ruff| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
+   :target: https://github.com/astral-sh/ruff

From: Matthew Feickert <matthew.feickert@cern.ch>
Date: Thu, 23 Oct 2025 11:49:22 -0600
Subject: [PATCH 09/23] Use blacken-docs still

---
 .pre-commit-config.yaml | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a750eec5a9..c32c99088d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -35,10 +35,17 @@ repos:
   - id: rst-inline-touching-normal

 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: "v0.12.0"
+  rev: "v0.12.11"
   hooks:
-    - id: ruff-check
-      args: ["--fix", "--show-fixes"]
+    - id: ruff-check
+      args: ["--fix", "--show-fixes"]
+    - id: ruff-format
+
+- repo: https://github.com/adamchainz/blacken-docs
+  rev: "1.20.0"
+  hooks:
+    - id: blacken-docs
+      additional_dependencies: [black==25.*]

 - repo: https://github.com/pre-commit/mirrors-mypy
   rev: v1.13.0

From 4bb61c3a6406dd0a5a88ad3e4c25b7f689c27114 Mon Sep 17 00:00:00 2001
From: Matthew Feickert <matthew.feickert@cern.ch>
Date: Thu, 23 Oct 2025 11:51:55 -0600
Subject: [PATCH 10/23] Apply ruff format

---
 docs/conf.py                                  | 260 ++++----
 docs/examples/notebooks/ShapeFactor.ipynb     |  58 +-
 docs/examples/notebooks/StatError.ipynb       |  52 +-
 .../binderexample/StatisticalAnalysis.ipynb   |  18 +-
 docs/examples/notebooks/histogrammar.ipynb    |  40 +-
 docs/examples/notebooks/histosys.ipynb        |  38 +-
 docs/examples/notebooks/importxml.ipynb       |  16 +-
 .../notebooks/learn/InterpolationCodes.ipynb  |  40 +-
 .../learn/TensorizingInterpolations.ipynb     |  64 +-
 .../notebooks/learn/UsingCalculators.ipynb    |  42 +-
 docs/examples/notebooks/multiBinPois.ipynb    |  32 +-
 .../multichannel-coupled-normsys.ipynb        |  66 +-
 .../notebooks/multichannel-normsys.ipynb      |  54 +-
 docs/examples/notebooks/normsys.ipynb         |  40 +-
 docs/exts/xref.py                             |   6 +-
 src/pyhf/__init__.py                          |   6 +-
 src/pyhf/cli/__init__.py                      |   2 +-
 src/pyhf/cli/cli.py                           |   2 +-
 src/pyhf/cli/complete.py                      |  14 +-
 src/pyhf/cli/infer.py                         |  38 +-
 src/pyhf/cli/patchset.py                      |  36 +-
 src/pyhf/cli/rootio.py                        |  36 +-
 src/pyhf/cli/spec.py                          | 182 +++---
 src/pyhf/compat.py                            |  52 +-
 src/pyhf/constraints.py                       |  26 +-
 src/pyhf/events.py                            |   5 +-
 src/pyhf/exceptions/__init__.py               |  10 +-
 src/pyhf/infer/__init__.py                    |   6 +-
 src/pyhf/infer/calculators.py                 |  20 +-
 src/pyhf/infer/mle.py                         |   2 +-
 src/pyhf/infer/test_statistics.py             |  28 +-
 src/pyhf/infer/utils.py                       |   2 +-
 src/pyhf/interpolators/__init__.py            |   4 +-
src/pyhf/interpolators/code0.py | 8 +- src/pyhf/interpolators/code1.py | 14 +- src/pyhf/interpolators/code2.py | 14 +- src/pyhf/interpolators/code4.py | 24 +- src/pyhf/interpolators/code4p.py | 18 +- src/pyhf/mixins.py | 16 +- src/pyhf/modifiers/histosys.py | 56 +- src/pyhf/modifiers/lumi.py | 42 +- src/pyhf/modifiers/normfactor.py | 38 +- src/pyhf/modifiers/normsys.py | 56 +- src/pyhf/modifiers/shapefactor.py | 38 +- src/pyhf/modifiers/shapesys.py | 38 +- src/pyhf/modifiers/staterror.py | 56 +- src/pyhf/optimize/__init__.py | 8 +- src/pyhf/optimize/common.py | 18 +- src/pyhf/optimize/mixins.py | 10 +- src/pyhf/optimize/opt_jax.py | 20 +- src/pyhf/optimize/opt_minuit.py | 16 +- src/pyhf/optimize/opt_scipy.py | 20 +- src/pyhf/parameters/__init__.py | 12 +- src/pyhf/parameters/paramsets.py | 30 +- src/pyhf/parameters/paramview.py | 14 +- src/pyhf/parameters/utils.py | 30 +- src/pyhf/patchset.py | 32 +- src/pyhf/pdf.py | 74 +-- src/pyhf/readxml.py | 224 +++---- src/pyhf/schema/loader.py | 10 +- src/pyhf/schema/variables.py | 4 +- src/pyhf/simplemodels.py | 26 +- src/pyhf/tensor/__init__.py | 6 +- src/pyhf/tensor/common.py | 16 +- src/pyhf/tensor/jax_backend.py | 16 +- src/pyhf/tensor/manager.py | 26 +- src/pyhf/tensor/numpy_backend.py | 17 +- src/pyhf/typing.py | 16 +- src/pyhf/utils.py | 18 +- src/pyhf/workspace.py | 220 +++---- src/pyhf/writexml.py | 188 +++--- tests/benchmarks/test_benchmark.py | 16 +- tests/conftest.py | 28 +- tests/test_backend_consistency.py | 44 +- tests/test_backends.py | 10 +- tests/test_calculator.py | 18 +- tests/test_cli.py | 12 +- tests/test_combined_modifiers.py | 594 +++++++++--------- tests/test_compat.py | 76 +-- tests/test_constraints.py | 96 +-- tests/test_custom_mods.py | 64 +- tests/test_events.py | 26 +- tests/test_export.py | 350 +++++------ tests/test_import.py | 352 +++++------ tests/test_infer.py | 48 +- tests/test_init.py | 16 +- tests/test_interpolate.py | 20 +- tests/test_jit.py | 78 +-- tests/test_mixins.py | 28 +- tests/test_modifiers.py | 94 +-- tests/test_notebooks.py | 26 +- tests/test_optim.py | 210 +++---- tests/test_paramsets.py | 16 +- tests/test_paramviewer.py | 8 +- tests/test_patchset.py | 50 +- tests/test_pdf.py | 470 +++++++------- tests/test_public_api.py | 28 +- tests/test_regression.py | 4 +- tests/test_schema.py | 450 ++++++------- tests/test_scripts.py | 306 ++++----- tests/test_simplemodels.py | 14 +- tests/test_tensor.py | 92 +-- tests/test_tensorviewer.py | 20 +- tests/test_utils.py | 38 +- tests/test_validation.py | 560 ++++++++--------- tests/test_workspace.py | 406 ++++++------ validation/manualonoff_roofit/onoff.py | 50 +- .../makedata.py | 14 +- .../makedata.py | 30 +- .../multichannel_histfactory/makedata.py | 30 +- .../shared_nuispar_across_types/make_data.py | 12 +- validation/standard_hypo_test_demo.py | 2 +- validation/xmlimport_input2/makedata.py | 14 +- 113 files changed, 3805 insertions(+), 3801 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6098bc49e4..a34d1a9805 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ import jupytext from intersphinx_registry import get_intersphinx_mapping -sys.path.insert(0, str(Path('./exts').resolve())) +sys.path.insert(0, str(Path("./exts").resolve())) # Convert jupyterlite example to ipynb docs_dir = Path(__file__).resolve().parent @@ -34,7 +34,7 @@ def setup(app): app.add_css_file( - 'https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css' + 
"https://cdnjs.cloudflare.com/ajax/libs/github-fork-ribbon-css/0.2.2/gh-fork-ribbon.min.css" ) @@ -48,22 +48,22 @@ def setup(app): # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', - 'sphinx.ext.intersphinx', - 'sphinxcontrib.bibtex', - 'sphinx.ext.napoleon', - 'sphinx_click', - 'nbsphinx', - 'sphinx_issues', - 'sphinx_copybutton', - 'xref', - 'jupyterlite_sphinx', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", + "sphinx.ext.intersphinx", + "sphinxcontrib.bibtex", + "sphinx.ext.napoleon", + "sphinx_click", + "nbsphinx", + "sphinx_issues", + "sphinx_copybutton", + "xref", + "jupyterlite_sphinx", ] bibtex_bibfiles = [ "bib/docs.bib", @@ -83,30 +83,30 @@ def setup(app): intersphinx_mapping = get_intersphinx_mapping( packages={ - 'python', - 'numpy', - 'scipy', - 'matplotlib', - 'iminuit', - 'uproot', - 'jsonpatch', + "python", + "numpy", + "scipy", + "matplotlib", + "iminuit", + "uproot", + "jsonpatch", } ) # GitHub repo -issues_github_path = 'scikit-hep/pyhf' +issues_github_path = "scikit-hep/pyhf" # Generate the API documentation when building autosummary_generate = True numpydoc_show_class_members = False # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # source_suffix = '.rst' # The encoding of source files. @@ -114,12 +114,12 @@ def setup(app): # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'pyhf' -copyright = '2018, Lukas Heinrich, Matthew Feickert, Giordon Stark' -author = 'Lukas Heinrich, Matthew Feickert, Giordon Stark' +project = "pyhf" +copyright = "2018, Lukas Heinrich, Matthew Feickert, Giordon Stark" +author = "Lukas Heinrich, Matthew Feickert, Giordon Stark" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -127,7 +127,7 @@ def setup(app): # The full version, including alpha/beta/rc tags. release = importlib.metadata.version("pyhf") # for example take major/minor/patch -version = '.'.join(release.split('.')[:3]) +version = ".".join(release.split(".")[:3]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
@@ -146,59 +146,59 @@ def setup(app): # today_fmt = '%B %d, %Y' autodoc_mock_imports = [ - 'jax', - 'iminuit', + "jax", + "iminuit", ] _type_aliases_inverted = { - 'pyhf.typing': [ - 'PathOrStr', - 'ParameterBase', - 'Parameter', - 'Measurement', - 'ModifierBase', - 'NormSys', - 'NormFactor', - 'HistoSys', - 'StatError', - 'ShapeSys', - 'ShapeFactor', - 'LumiSys', - 'Modifier', - 'Sample', - 'Channel', - 'Observation', - 'Workspace', - 'Literal', + "pyhf.typing": [ + "PathOrStr", + "ParameterBase", + "Parameter", + "Measurement", + "ModifierBase", + "NormSys", + "NormFactor", + "HistoSys", + "StatError", + "ShapeSys", + "ShapeFactor", + "LumiSys", + "Modifier", + "Sample", + "Channel", + "Observation", + "Workspace", + "Literal", ], - 'numpy.typing': ['ArrayLike', 'DTypeLike', 'NBitBase', 'NDArray'], + "numpy.typing": ["ArrayLike", "DTypeLike", "NBitBase", "NDArray"], } autodoc_type_aliases = { - item: f'{k}.{item}' for k, v in _type_aliases_inverted.items() for item in v + item: f"{k}.{item}" for k, v in _type_aliases_inverted.items() for item in v } -autodoc_typehints_format = 'fully-qualified' +autodoc_typehints_format = "fully-qualified" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [ - '_build', - 'JOSS', - 'lite', - '**.ipynb_checkpoints', - 'examples/experiments/edwardpyhf.ipynb', - 'examples/notebooks/ImpactPlot.ipynb', - 'examples/notebooks/Recast.ipynb', - 'examples/notebooks/StatError.ipynb', - 'examples/notebooks/histogrammar.ipynb', - 'examples/notebooks/histosys.ipynb', - 'examples/notebooks/importxml.ipynb', - 'examples/notebooks/multichannel-coupled-normsys.ipynb', - 'examples/notebooks/multichannel-normsys.ipynb', - 'examples/notebooks/normsys.ipynb', - 'examples/notebooks/pullplot.ipynb', + "_build", + "JOSS", + "lite", + "**.ipynb_checkpoints", + "examples/experiments/edwardpyhf.ipynb", + "examples/notebooks/ImpactPlot.ipynb", + "examples/notebooks/Recast.ipynb", + "examples/notebooks/StatError.ipynb", + "examples/notebooks/histogrammar.ipynb", + "examples/notebooks/histosys.ipynb", + "examples/notebooks/importxml.ipynb", + "examples/notebooks/multichannel-coupled-normsys.ipynb", + "examples/notebooks/multichannel-normsys.ipynb", + "examples/notebooks/normsys.ipynb", + "examples/notebooks/pullplot.ipynb", ] # The reST default role (used for this markup: `text`) to use for all @@ -221,7 +221,7 @@ def setup(app): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -238,7 +238,7 @@ def setup(app): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'pydata_sphinx_theme' +html_theme = "pydata_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -272,16 +272,16 @@ def setup(app): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] +html_static_path = ["_static"] html_css_files = [ - 'css/custom.css', + "css/custom.css", ] html_js_files = [ - 'js/custom.js', + "js/custom.js", ( - 'https://views.scientific-python.org/js/plausible.js', + "https://views.scientific-python.org/js/plausible.js", {"data-domain": "pyhf.readthedocs.io", "defer": "defer"}, ), ] @@ -290,7 +290,7 @@ def setup(app): # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # -html_extra_path = ['_extras'] +html_extra_path = ["_extras"] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. @@ -364,7 +364,7 @@ def setup(app): # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'pyhfdoc' +htmlhelp_basename = "pyhfdoc" # sphinx-copybutton configuration copybutton_prompt_text = r">>> |\.\.\. |\$ " @@ -394,10 +394,10 @@ def setup(app): latex_documents = [ ( master_doc, - 'pyhf.tex', - 'pyhf Documentation', - 'Lukas Heinrich, Matthew Feickert, Giordon Stark', - 'manual', + "pyhf.tex", + "pyhf Documentation", + "Lukas Heinrich, Matthew Feickert, Giordon Stark", + "manual", ) ] @@ -438,7 +438,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'pyhf', 'pyhf Documentation', [author], 1)] +man_pages = [(master_doc, "pyhf", "pyhf Documentation", [author], 1)] # If true, show URL addresses after external links. # @@ -453,12 +453,12 @@ def setup(app): texinfo_documents = [ ( master_doc, - 'pyhf', - 'pyhf Documentation', + "pyhf", + "pyhf Documentation", author, - 'pyhf', - 'One line description of project.', - 'Miscellaneous', + "pyhf", + "One line description of project.", + "Miscellaneous", ) ] @@ -479,63 +479,63 @@ def setup(app): # texinfo_no_detailmenu = False mathjax3_config = { - 'tex2jax': {'inlineMath': [['$', '$'], ['\\(', '\\)']]}, - 'tex': { - 'macros': { - 'bm': ["\\boldsymbol{#1}", 1], # \usepackage{bm}, see mathjax/MathJax#1219 - 'HiFa': r'\texttt{HistFactory}', - 'Root': r'\texttt{ROOT}', - 'RooStats': r'\texttt{RooStats}', - 'RooFit': r'\texttt{RooFit}', - 'pyhf': r'\texttt{pyhf}', - 'CLs': r'\mathrm{CL}_{s}', - 'freeset': r'\bm{\eta}', - 'constrset': r'\bm{\chi}', - 'singleconstr': r'\chi', - 'channelcounts': r'\bm{n}', - 'auxdata': r'\bm{a}', - 'poiset': r'\bm{\psi}', - 'nuisset': r'\bm{\theta}', - 'fullset': r'\bm{\phi}', - 'singlefull': r'\phi', - 'TeV': r'\textrm{TeV}', + "tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]]}, + "tex": { + "macros": { + "bm": ["\\boldsymbol{#1}", 1], # \usepackage{bm}, see mathjax/MathJax#1219 + "HiFa": r"\texttt{HistFactory}", + "Root": r"\texttt{ROOT}", + "RooStats": r"\texttt{RooStats}", + "RooFit": r"\texttt{RooFit}", + "pyhf": r"\texttt{pyhf}", + "CLs": r"\mathrm{CL}_{s}", + "freeset": r"\bm{\eta}", + "constrset": r"\bm{\chi}", + "singleconstr": r"\chi", + "channelcounts": r"\bm{n}", + "auxdata": r"\bm{a}", + "poiset": r"\bm{\psi}", + "nuisset": r"\bm{\theta}", + "fullset": r"\bm{\phi}", + "singlefull": r"\phi", + "TeV": r"\textrm{TeV}", } }, } # c.f. 
https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder linkcheck_ignore = [ - 'cli.html#pyhf-xml2json', + "cli.html#pyhf-xml2json", # https://doi.org/10.31526/lhep.2020.158 is causing linkcheck connection timeouts in CI - r'https://doi\.org/10\.31526/.*', + r"https://doi\.org/10\.31526/.*", # https://doi.org/10.1051/epjconf/x DOI URLs will periodically generate 500 Server Error - r'https://doi\.org/10\.1051/epjconf/.*', + r"https://doi\.org/10\.1051/epjconf/.*", # https://indico.desy.de/event/22731/contributions/47953/ is frequently generating 403 Client Error - r'https://indico.desy.de/event/22731/.*', + r"https://indico.desy.de/event/22731/.*", # https://indico.belle2.org/event/8470/contributions/55871/ is frequently generating 403 Client Error - r'https://indico.belle2.org/event/8470/.*', + r"https://indico.belle2.org/event/8470/.*", # https://doi.org/10.1142/S0217732321020016 is frequently generating 403 Client Error - r'https://doi\.org/10.1142/.*', + r"https://doi\.org/10.1142/.*", # CERN doesn't maintain its SSL certs well enough to not have SSLErrors - r'https://twiki.cern.ch/.*', + r"https://twiki.cern.ch/.*", # tags for a release won't exist until it is made, but the release notes # and ReadTheDocs need to reference them - r'https://github.com/scikit-hep/pyhf/releases/tag/.*', - r'https://pyhf.readthedocs.io/en/.*', + r"https://github.com/scikit-hep/pyhf/releases/tag/.*", + r"https://pyhf.readthedocs.io/en/.*", # the following are 403s as they map to journals.aps.org or academic.oup.com - r'https://doi.org/10.1093/ptep/ptad144', - r'https://doi.org/10.1103/PhysRevD.104.055017', - r'https://doi.org/10.1103/PhysRevD.107.095021', - r'https://doi.org/10.1103/PhysRevD.108.016002', - r'https://doi.org/10.1103/PhysRevD.106.032005', - r'https://doi.org/10.1103/PhysRevLett.127.181802', - r'https://doi.org/10.1103/PhysRevLett.130.231801', - r'https://doi.org/10.1103/PhysRevLett.131.211802', + r"https://doi.org/10.1093/ptep/ptad144", + r"https://doi.org/10.1103/PhysRevD.104.055017", + r"https://doi.org/10.1103/PhysRevD.107.095021", + r"https://doi.org/10.1103/PhysRevD.108.016002", + r"https://doi.org/10.1103/PhysRevD.106.032005", + r"https://doi.org/10.1103/PhysRevLett.127.181802", + r"https://doi.org/10.1103/PhysRevLett.130.231801", + r"https://doi.org/10.1103/PhysRevLett.131.211802", # FNAL blocks GitHub Actions? 
- r'https://indico.fnal.gov/event/17566/contributions/44103/', - r'https://indico.fnal.gov/event/17566/session/0/contribution/99', + r"https://indico.fnal.gov/event/17566/contributions/44103/", + r"https://indico.fnal.gov/event/17566/session/0/contribution/99", # GitHub anchor links don't work - r'https://github.com/scikit-hep/pyhf/issues/850#issuecomment-1239975121', + r"https://github.com/scikit-hep/pyhf/issues/850#issuecomment-1239975121", ] linkcheck_retries = 50 diff --git a/docs/examples/notebooks/ShapeFactor.ipynb b/docs/examples/notebooks/ShapeFactor.ipynb index c387a96a3a..5942d055ea 100644 --- a/docs/examples/notebooks/ShapeFactor.ipynb +++ b/docs/examples/notebooks/ShapeFactor.ipynb @@ -33,41 +33,41 @@ "source": [ "def prep_data(sourcedata):\n", " spec = {\n", - " 'channels': [\n", + " \"channels\": [\n", " {\n", - " 'name': 'signal',\n", - " 'samples': [\n", + " \"name\": \"signal\",\n", + " \"samples\": [\n", " {\n", - " 'name': 'signal',\n", - " 'data': sourcedata['signal']['bindata']['sig'],\n", - " 'modifiers': [\n", - " {'name': 'mu', 'type': 'normfactor', 'data': None}\n", + " \"name\": \"signal\",\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"sig\"],\n", + " \"modifiers\": [\n", + " {\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}\n", " ],\n", " },\n", " {\n", - " 'name': 'bkg1',\n", - " 'data': sourcedata['signal']['bindata']['bkg1'],\n", - " 'modifiers': [\n", + " \"name\": \"bkg1\",\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"bkg1\"],\n", + " \"modifiers\": [\n", " {\n", - " 'name': 'coupled_shapefactor',\n", - " 'type': 'shapefactor',\n", - " 'data': None,\n", + " \"name\": \"coupled_shapefactor\",\n", + " \"type\": \"shapefactor\",\n", + " \"data\": None,\n", " }\n", " ],\n", " },\n", " ],\n", " },\n", " {\n", - " 'name': 'control',\n", - " 'samples': [\n", + " \"name\": \"control\",\n", + " \"samples\": [\n", " {\n", - " 'name': 'background',\n", - " 'data': sourcedata['control']['bindata']['bkg1'],\n", - " 'modifiers': [\n", + " \"name\": \"background\",\n", + " \"data\": sourcedata[\"control\"][\"bindata\"][\"bkg1\"],\n", + " \"modifiers\": [\n", " {\n", - " 'name': 'coupled_shapefactor',\n", - " 'type': 'shapefactor',\n", - " 'data': None,\n", + " \"name\": \"coupled_shapefactor\",\n", + " \"type\": \"shapefactor\",\n", + " \"data\": None,\n", " }\n", " ],\n", " }\n", @@ -78,7 +78,7 @@ " pdf = pyhf.Model(spec, poi_name=\"mu\")\n", " data = []\n", " for channel in pdf.config.channels:\n", - " data += sourcedata[channel]['bindata']['data']\n", + " data += sourcedata[channel][\"bindata\"][\"data\"]\n", " data = data + pdf.config.auxdata\n", " return data, pdf" ] @@ -124,11 +124,11 @@ " }\n", "}\n", "\n", - "data, pdf = prep_data(source['channels'])\n", - "print(f'data: {data}')\n", + "data, pdf = prep_data(source[\"channels\"])\n", + "print(f\"data: {data}\")\n", "\n", "init_pars = pdf.config.suggested_init()\n", - "print(f'expected data: {pdf.expected_data(init_pars)}')\n", + "print(f\"expected data: {pdf.expected_data(init_pars)}\")\n", "\n", "par_bounds = pdf.config.suggested_bounds()" ] @@ -156,10 +156,10 @@ } ], "source": [ - "print(f'initialization parameters: {pdf.config.suggested_init()}')\n", + "print(f\"initialization parameters: {pdf.config.suggested_init()}\")\n", "\n", "unconpars = pyhf.infer.mle.fit(data, pdf)\n", - "print(f'parameters post unconstrained fit: {unconpars}')" + "print(f\"parameters post unconstrained fit: {unconpars}\")" ] }, { @@ -215,8 +215,8 @@ "source": [ "fig, ax = plt.subplots(figsize=(10, 7))\n", 
"artists = brazil.plot_results(poi_tests, tests, test_size=0.05, ax=ax)\n", - "print(f'expected upper limits: {exp_limits}')\n", - "print(f'observed upper limit : {obs_limit}')" + "print(f\"expected upper limits: {exp_limits}\")\n", + "print(f\"observed upper limit : {obs_limit}\")" ] } ], diff --git a/docs/examples/notebooks/StatError.ipynb b/docs/examples/notebooks/StatError.ipynb index 95f013e5c9..cc234fd280 100644 --- a/docs/examples/notebooks/StatError.ipynb +++ b/docs/examples/notebooks/StatError.ipynb @@ -25,38 +25,38 @@ "outputs": [], "source": [ "spec = {\n", - " 'channels': [\n", + " \"channels\": [\n", " {\n", - " 'name': 'firstchannel',\n", - " 'samples': [\n", + " \"name\": \"firstchannel\",\n", + " \"samples\": [\n", " {\n", - " 'name': 'mu',\n", - " 'data': [10.0, 10.0],\n", - " 'modifiers': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n", + " \"name\": \"mu\",\n", + " \"data\": [10.0, 10.0],\n", + " \"modifiers\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n", " },\n", " {\n", - " 'name': 'bkg1',\n", - " 'data': [50.0, 70.0],\n", - " 'modifiers': [\n", + " \"name\": \"bkg1\",\n", + " \"data\": [50.0, 70.0],\n", + " \"modifiers\": [\n", " {\n", - " 'name': 'stat_firstchannel',\n", - " 'type': 'staterror',\n", - " 'data': [10.0, 10.0],\n", + " \"name\": \"stat_firstchannel\",\n", + " \"type\": \"staterror\",\n", + " \"data\": [10.0, 10.0],\n", " }\n", " ],\n", " },\n", " {\n", - " 'name': 'bkg2',\n", - " 'data': [30.0, 20.0],\n", - " 'modifiers': [\n", + " \"name\": \"bkg2\",\n", + " \"data\": [30.0, 20.0],\n", + " \"modifiers\": [\n", " {\n", - " 'name': 'stat_firstchannel',\n", - " 'type': 'staterror',\n", - " 'data': [5.0, 5.0],\n", + " \"name\": \"stat_firstchannel\",\n", + " \"type\": \"staterror\",\n", + " \"data\": [5.0, 5.0],\n", " }\n", " ],\n", " },\n", - " {'name': 'bkg3', 'data': [20.0, 15.0], 'modifiers': []},\n", + " {\"name\": \"bkg3\", \"data\": [20.0, 15.0], \"modifiers\": []},\n", " ],\n", " },\n", " # {\n", @@ -187,7 +187,7 @@ } ], "source": [ - "p.spec['channels'][0]['samples'][1]" + "p.spec[\"channels\"][0][\"samples\"][1]" ] }, { @@ -205,25 +205,25 @@ "\n", "\n", "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", "\n", "\n", "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)" ] }, diff --git a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb index 7eccfd456b..6915f5378a 100644 --- a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb +++ 
b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
@@ -60,7 +60,7 @@
     }
    ],
    "source": [
-    "anim = base64.b64encode(open('workflow.gif', 'rb').read()).decode('ascii')\n",
+    "anim = base64.b64encode(open(\"workflow.gif\", \"rb\").read()).decode(\"ascii\")\n",
     "HTML(f'<img src=\"data:image/gif;base64,{anim}\">')"
    ]
   },
@@ -79,7 +79,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "spec = pyhf.readxml.parse('meas.xml', Path.cwd())\n",
+    "spec = pyhf.readxml.parse(\"meas.xml\", Path.cwd())\n",
     "workspace = pyhf.Workspace(spec)"
    ]
   },
@@ -117,10 +117,10 @@
     }
    ],
    "source": [
-    "pdf = workspace.model(measurement_name='meas')\n",
+    "pdf = workspace.model(measurement_name=\"meas\")\n",
     "data = workspace.data(pdf)\n",
     "# what is the measurement?\n",
-    "workspace.get_measurement(measurement_name='meas')"
+    "workspace.get_measurement(measurement_name=\"meas\")"
    ]
   },
   {
@@ -147,8 +147,8 @@
     }
    ],
    "source": [
-    "print(f'Samples:\\n {pdf.config.samples}')\n",
-    "print(f'Parameters:\\n {pdf.config.parameters}')"
+    "print(f\"Samples:\\n {pdf.config.samples}\")\n",
+    "print(f\"Parameters:\\n {pdf.config.parameters}\")"
    ]
   },
   {
@@ -2266,13 +2266,13 @@
    "fig, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=True, sharex=True)\n",
    "fig.set_size_inches(18, 4)\n",
    "ax1.set_ylim(0, 1.5 * np.max(workspace.data(pdf, include_auxdata=False)))\n",
-    "ax1.set_title('nominal signal + background µ = 1')\n",
+    "ax1.set_title(\"nominal signal + background µ = 1\")\n",
    "plot(ax=ax1, **{k: nominal[v] for k, v in par_name_dict.items()})\n",
    "\n",
-    "ax2.set_title('nominal background-only µ = 0')\n",
+    "ax2.set_title(\"nominal background-only µ = 0\")\n",
    "plot(ax=ax2, **{k: background_only[v] for k, v in par_name_dict.items()})\n",
    "\n",
-    "ax3.set_title(f'best fit µ = {best_fit[pdf.config.poi_index]:.3g}')\n",
+    "ax3.set_title(f\"best fit µ = {best_fit[pdf.config.poi_index]:.3g}\")\n",
    "plot(ax=ax3, **{k: best_fit[v] for k, v in par_name_dict.items()});"
   ]
  },
diff --git a/docs/examples/notebooks/histogrammar.ipynb b/docs/examples/notebooks/histogrammar.ipynb
index fb723ff786..5b58c39213 100644
--- a/docs/examples/notebooks/histogrammar.ipynb
+++ b/docs/examples/notebooks/histogrammar.ipynb
@@ -96,9 +96,9 @@
     }
    ],
    "source": [
-    "print(background_histogram.toJson()['data']['values'])\n",
-    "print(signal_histogram.toJson()['data']['values'])\n",
-    "print(data_histogram.toJson()['data']['values'])"
+    "print(background_histogram.toJson()[\"data\"][\"values\"])\n",
+    "print(signal_histogram.toJson()[\"data\"][\"values\"])\n",
+    "print(data_histogram.toJson()[\"data\"][\"values\"])"
    ]
   },
   {
@@ -131,18 +131,18 @@
    "outputs": [],
    "source": [
     "spec = {\n",
-    "    'singlechannel': {\n",
-    "        'signal': {\n",
-    "            'data': signal_histogram.toJson()['data']['values'],\n",
-    "            'mods': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n",
+    "    \"singlechannel\": {\n",
+    "        \"signal\": {\n",
+    "            \"data\": signal_histogram.toJson()[\"data\"][\"values\"],\n",
+    "            \"mods\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n",
     "        },\n",
-    "        'background': {\n",
-    "            'data': background_histogram.toJson()['data']['values'],\n",
-    "            'mods': [\n",
+    "        \"background\": {\n",
+    "            \"data\": background_histogram.toJson()[\"data\"][\"values\"],\n",
+    "            \"mods\": [\n",
     "                {\n",
-    "                    'name': 'bkg_norm',\n",
-    "                    'type': 'normsys',\n",
-    "                    'data': {'lo': 0.90, 'hi': 1.10},\n",
+    "                    \"name\": \"bkg_norm\",\n",
+    "                    \"type\": \"normsys\",\n",
+    "                    \"data\": {\"lo\": 0.90, \"hi\": 1.10},\n",
     "                }\n",
     "            ],\n",
     "        },\n",
@@ -158,7 +158,7 @@
    "source": [
     "p = pyhf.Model(spec, poi_name=\"mu\")\n",
     "\n",
-
"data = data_histogram.toJson()['data']['values'] + p.config.auxdata" + "data = data_histogram.toJson()[\"data\"][\"values\"] + p.config.auxdata" ] }, { @@ -177,25 +177,25 @@ "outputs": [], "source": [ "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", "\n", "\n", "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)" ] }, diff --git a/docs/examples/notebooks/histosys.ipynb b/docs/examples/notebooks/histosys.ipynb index 83192fc2e2..c6c6a2fc08 100644 --- a/docs/examples/notebooks/histosys.ipynb +++ b/docs/examples/notebooks/histosys.ipynb @@ -36,20 +36,20 @@ "\n", "def prep_data(source):\n", " spec = {\n", - " 'singlechannel': {\n", - " 'signal': {\n", - " 'data': source['bindata']['sig'],\n", - " 'mods': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n", + " \"singlechannel\": {\n", + " \"signal\": {\n", + " \"data\": source[\"bindata\"][\"sig\"],\n", + " \"mods\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n", " },\n", - " 'background': {\n", - " 'data': source['bindata']['bkg'],\n", - " 'mods': [\n", + " \"background\": {\n", + " \"data\": source[\"bindata\"][\"bkg\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'bkg_norm',\n", - " 'type': 'histosys',\n", - " 'data': {\n", - " 'lo_hist': source['bindata']['bkgsys_dn'],\n", - " 'hi_hist': source['bindata']['bkgsys_up'],\n", + " \"name\": \"bkg_norm\",\n", + " \"type\": \"histosys\",\n", + " \"data\": {\n", + " \"lo_hist\": source[\"bindata\"][\"bkgsys_dn\"],\n", + " \"hi_hist\": source[\"bindata\"][\"bkgsys_up\"],\n", " },\n", " }\n", " ],\n", @@ -57,7 +57,7 @@ " }\n", " }\n", " pdf = Model(spec, poi_name=\"mu\")\n", - " data = source['bindata']['data'] + pdf.config.auxdata\n", + " data = source[\"bindata\"][\"data\"] + pdf.config.auxdata\n", " return data, pdf" ] }, @@ -149,23 +149,23 @@ ], "source": [ "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)\n", "\n", "\n", "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for 
x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", diff --git a/docs/examples/notebooks/importxml.ipynb b/docs/examples/notebooks/importxml.ipynb index a1af36ea1c..31ed82c959 100644 --- a/docs/examples/notebooks/importxml.ipynb +++ b/docs/examples/notebooks/importxml.ipynb @@ -47,10 +47,10 @@ "reload(pyhf.readxml)\n", "\n", "spec = pyhf.readxml.parse(\n", - " '../../validation/xmlimport_input/config/example.xml',\n", - " '../../validation/xmlimport_input/',\n", + " \"../../validation/xmlimport_input/config/example.xml\",\n", + " \"../../validation/xmlimport_input/\",\n", ")\n", - "pdf = pyhf.Model(spec['channels'])\n", + "pdf = pyhf.Model(spec[\"channels\"])\n", "pdf" ] }, @@ -72,7 +72,7 @@ ], "source": [ "data = [\n", - " binvalue for k in pdf.config.channel_order for binvalue in spec['data'][k]\n", + " binvalue for k in pdf.config.channel_order for binvalue in spec[\"data\"][k]\n", "] + pdf.auxdata\n", "data" ] @@ -150,7 +150,7 @@ "disassemble(pdf, pars)\n", "\n", "\n", - "spec['channels']['channel1']['signal']['data']" + "spec[\"channels\"][\"channel1\"][\"signal\"][\"data\"]" ] }, { @@ -190,10 +190,10 @@ "\n", "\n", "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(testmus, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(testmus, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(testmus, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)\n", "\n", "\n", diff --git a/docs/examples/notebooks/learn/InterpolationCodes.ipynb b/docs/examples/notebooks/learn/InterpolationCodes.ipynb index 07868c080f..690643509a 100644 --- a/docs/examples/notebooks/learn/InterpolationCodes.ipynb +++ b/docs/examples/notebooks/learn/InterpolationCodes.ipynb @@ -148,10 +148,10 @@ } ], "source": [ - "plt.plot(alphas, [nom_1 + delta for delta in deltas], linestyle='--')\n", - "plt.scatter((-1, 0, 1), (down_1, nom_1, up_1), color='k')\n", - "plt.xlabel(r'$\\alpha_\\mathrm{JES}$')\n", - "plt.ylabel(r'Events')" + "plt.plot(alphas, [nom_1 + delta for delta in deltas], linestyle=\"--\")\n", + "plt.scatter((-1, 0, 1), (down_1, nom_1, up_1), color=\"k\")\n", + "plt.xlabel(r\"$\\alpha_\\mathrm{JES}$\")\n", + "plt.ylabel(r\"Events\")" ] }, { @@ -210,13 +210,13 @@ "def plot_measurements(down_hist, nom_hist, up_hist):\n", " bincenters = np.arange(len(nom_hist))\n", " for i, h in enumerate(zip(up_hist, nom_hist, down_hist)):\n", - " plt.scatter([i] * len(h), h, color='k', alpha=0.5)\n", + " plt.scatter([i] * len(h), h, color=\"k\", alpha=0.5)\n", "\n", - " for c, h in zip(['r', 'k', 'b'], [down_hist, nom_hist, up_hist]):\n", - " plt.plot(bincenters, h, color=c, linestyle='-', alpha=0.5)\n", + " for c, h in zip([\"r\", \"k\", \"b\"], [down_hist, nom_hist, up_hist]):\n", + " plt.plot(bincenters, h, color=c, linestyle=\"-\", alpha=0.5)\n", "\n", - " plt.xlabel('Bin index in histogram')\n", - " plt.ylabel('Events')\n", + " plt.xlabel(\"Bin index in histogram\")\n", + " plt.ylabel(\"Events\")\n", "\n", "\n", 
"plot_measurements(down_hist, nom_hist, up_hist)" @@ -254,7 +254,7 @@ " ]\n", "\n", " plot_measurements(down_hist, nom_hist, up_hist)\n", - " plt.plot(bincenters, interpolated_vals, color='g', linestyle='--')\n", + " plt.plot(bincenters, interpolated_vals, color=\"g\", linestyle=\"--\")\n", "\n", "\n", "plot_interpolated_histogram(0.5, down_hist, nom_hist, up_hist)" @@ -416,31 +416,31 @@ " z = np.asarray(at_alphas)\n", " bottom = np.zeros_like(x)\n", " fig = plt.figure(figsize=(10, 10))\n", - " ax1 = fig.add_subplot(111, projection='3d')\n", + " ax1 = fig.add_subplot(111, projection=\"3d\")\n", " ax1.plot_wireframe(x, y, z, alpha=0.3)\n", "\n", " x, y = np.meshgrid(bincenters, [alpha])\n", " z = interpolate_alpha_range([alpha], down_hist, nom_hist, up_hist)\n", "\n", - " ax1.plot_wireframe(x, y, z, edgecolor='g', linestyle='--')\n", + " ax1.plot_wireframe(x, y, z, edgecolor=\"g\", linestyle=\"--\")\n", " ax1.set_xlim(0, 10)\n", " ax1.set_ylim(-1.0, 1.5)\n", " ax1.set_zlim(0, 25)\n", " ax1.view_init(azim=-125)\n", - " ax1.set_xlabel('Bin Index')\n", - " ax1.set_ylabel(r'$\\alpha_\\mathrm{JES}$')\n", - " ax1.set_zlabel('Events')\n", + " ax1.set_xlabel(\"Bin Index\")\n", + " ax1.set_ylabel(r\"$\\alpha_\\mathrm{JES}$\")\n", + " ax1.set_zlabel(\"Events\")\n", "\n", " # add in 2D plot goodness\n", "\n", " for c, h, zs in zip(\n", - " ['r', 'k', 'b'], [down_hist, nom_hist, up_hist], [-1.0, 0.0, 1.0]\n", + " [\"r\", \"k\", \"b\"], [down_hist, nom_hist, up_hist], [-1.0, 0.0, 1.0]\n", " ):\n", - " ax1.plot(bincenters, h, color=c, linestyle='-', alpha=0.5, zdir='y', zs=zs)\n", - " ax1.plot(bincenters, h, color=c, linestyle='-', alpha=0.25, zdir='y', zs=1.5)\n", + " ax1.plot(bincenters, h, color=c, linestyle=\"-\", alpha=0.5, zdir=\"y\", zs=zs)\n", + " ax1.plot(bincenters, h, color=c, linestyle=\"-\", alpha=0.25, zdir=\"y\", zs=1.5)\n", "\n", - " ax1.plot(bincenters, z.T, color='g', linestyle='--', zdir='y', zs=alpha)\n", - " ax1.plot(bincenters, z.T, color='g', linestyle='--', alpha=0.5, zdir='y', zs=1.5)\n", + " ax1.plot(bincenters, z.T, color=\"g\", linestyle=\"--\", zdir=\"y\", zs=alpha)\n", + " ax1.plot(bincenters, z.T, color=\"g\", linestyle=\"--\", alpha=0.5, zdir=\"y\", zs=1.5)\n", "\n", " plt.show()\n", "\n", diff --git a/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb b/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb index ada2ead5f9..5e7867307b 100644 --- a/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb +++ b/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb @@ -589,13 +589,13 @@ "\n", " for nh, histo in enumerate(histoset):\n", " alphas_times_deltas_up = np.einsum(\n", - " 'i,j->ij', alphaset, all_histo_deltas_up[nh]\n", + " \"i,j->ij\", alphaset, all_histo_deltas_up[nh]\n", " )\n", " alphas_times_deltas_dn = np.einsum(\n", - " 'i,j->ij', alphaset, all_histo_deltas_dn[nh]\n", + " \"i,j->ij\", alphaset, all_histo_deltas_dn[nh]\n", " )\n", " masks = np.einsum(\n", - " 'i,j->ij', alphaset > 0, np.ones_like(all_histo_deltas_dn[nh])\n", + " \"i,j->ij\", alphaset > 0, np.ones_like(all_histo_deltas_dn[nh])\n", " )\n", "\n", " alpha_deltas = np.where(\n", @@ -721,13 +721,13 @@ " allset_all_histo_nom = histogramssets[:, :, 1]\n", "\n", " allsets_all_histos_alphas_times_deltas_up = np.einsum(\n", - " 'sa,shb->shab', alphasets, allset_all_histo_deltas_up\n", + " \"sa,shb->shab\", alphasets, allset_all_histo_deltas_up\n", " )\n", " allsets_all_histos_alphas_times_deltas_dn = np.einsum(\n", - " 'sa,shb->shab', alphasets, 
allset_all_histo_deltas_dn\n", + " \"sa,shb->shab\", alphasets, allset_all_histo_deltas_dn\n", " )\n", " allsets_all_histos_masks = np.einsum(\n", - " 'sa,s...u->s...au', alphasets > 0, np.ones_like(allset_all_histo_deltas_dn)\n", + " \"sa,s...u->s...au\", alphasets > 0, np.ones_like(allset_all_histo_deltas_dn)\n", " )\n", "\n", " allsets_all_histos_deltas = np.where(\n", @@ -851,13 +851,13 @@ " allset_all_histo_nom = histogramssets[:, :, 1]\n", "\n", " allsets_all_histos_alphas_times_deltas_up = np.einsum(\n", - " 'sa,shb->shab', alphasets, allset_all_histo_deltas_up\n", + " \"sa,shb->shab\", alphasets, allset_all_histo_deltas_up\n", " )\n", " allsets_all_histos_alphas_times_deltas_dn = np.einsum(\n", - " 'sa,shb->shab', alphasets, allset_all_histo_deltas_dn\n", + " \"sa,shb->shab\", alphasets, allset_all_histo_deltas_dn\n", " )\n", " allsets_all_histos_masks = np.einsum(\n", - " 'sa,s...u->s...au', alphasets > 0, np.ones_like(allset_all_histo_deltas_dn)\n", + " \"sa,s...u->s...au\", alphasets > 0, np.ones_like(allset_all_histo_deltas_dn)\n", " )\n", "\n", " allsets_all_histos_deltas = np.where(\n", @@ -872,7 +872,7 @@ " all_histos_deltas = allsets_all_histos_deltas[nset]\n", " noms = histogramssets[nset, :, 1]\n", "\n", - " all_histos_noms_repeated = np.einsum('a,hn->han', np.ones_like(alphaset), noms)\n", + " all_histos_noms_repeated = np.einsum(\"a,hn->han\", np.ones_like(alphaset), noms)\n", "\n", " set_result = all_histos_deltas + all_histos_noms_repeated\n", " all_results.append(set_result)\n", @@ -967,13 +967,13 @@ " # x is dummy index\n", "\n", " allsets_allhistos_alphas_times_deltas_up = np.einsum(\n", - " 'sa,shb->shab', alphasets, allset_allhisto_deltas_up\n", + " \"sa,shb->shab\", alphasets, allset_allhisto_deltas_up\n", " )\n", " allsets_allhistos_alphas_times_deltas_dn = np.einsum(\n", - " 'sa,shb->shab', alphasets, allset_allhisto_deltas_dn\n", + " \"sa,shb->shab\", alphasets, allset_allhisto_deltas_dn\n", " )\n", " allsets_allhistos_masks = np.einsum(\n", - " 'sa,sxu->sxau',\n", + " \"sa,sxu->sxau\",\n", " np.where(alphasets > 0, np.ones(alphasets.shape), np.zeros(alphasets.shape)),\n", " np.ones(allset_allhisto_deltas_dn.shape),\n", " )\n", @@ -984,7 +984,7 @@ " allsets_allhistos_alphas_times_deltas_dn,\n", " )\n", " allsets_allhistos_noms_repeated = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_allhisto_nom\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_allhisto_nom\n", " )\n", " set_results = allsets_allhistos_deltas + allsets_allhistos_noms_repeated\n", " return set_results" @@ -1186,17 +1186,17 @@ " for nh, histo in enumerate(histoset):\n", " # bases and exponents need to have an outer product, to essentially tile or repeat over rows/cols\n", " bases_up = np.einsum(\n", - " 'a,b->ab', np.ones(alphaset.shape), all_histo_deltas_up[nh]\n", + " \"a,b->ab\", np.ones(alphaset.shape), all_histo_deltas_up[nh]\n", " )\n", " bases_dn = np.einsum(\n", - " 'a,b->ab', np.ones(alphaset.shape), all_histo_deltas_dn[nh]\n", + " \"a,b->ab\", np.ones(alphaset.shape), all_histo_deltas_dn[nh]\n", " )\n", " exponents = np.einsum(\n", - " 'a,b->ab', np.abs(alphaset), np.ones(all_histo_deltas_up[nh].shape)\n", + " \"a,b->ab\", np.abs(alphaset), np.ones(all_histo_deltas_up[nh].shape)\n", " )\n", "\n", " masks = np.einsum(\n", - " 'a,b->ab', alphaset > 0, np.ones(all_histo_deltas_dn[nh].shape)\n", + " \"a,b->ab\", alphaset > 0, np.ones(all_histo_deltas_dn[nh].shape)\n", " )\n", " bases = np.where(masks, bases_up, bases_dn)\n", " alpha_deltas = np.power(bases, 
exponents)\n", @@ -1218,17 +1218,17 @@ " )\n", "\n", " bases_up = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_up\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_up\n", " )\n", " bases_dn = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", " )\n", " exponents = np.einsum(\n", - " 'sa,shb->shab', np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", "\n", " masks = np.einsum(\n", - " 'sa,shb->shab', alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", " bases = np.where(masks, bases_up, bases_dn)\n", "\n", @@ -1256,17 +1256,17 @@ " )\n", "\n", " bases_up = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_up\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_up\n", " )\n", " bases_dn = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", " )\n", " exponents = np.einsum(\n", - " 'sa,shb->shab', np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", "\n", " masks = np.einsum(\n", - " 'sa,shb->shab', alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", " bases = np.where(masks, bases_up, bases_dn)\n", "\n", @@ -1276,7 +1276,7 @@ " for nset, (_, alphaset) in enumerate(zip(histogramssets, alphasets)):\n", " all_histos_deltas = allsets_all_histos_deltas[nset]\n", " noms = allset_all_histo_nom[nset]\n", - " all_histos_noms_repeated = np.einsum('a,hn->han', np.ones_like(alphaset), noms)\n", + " all_histos_noms_repeated = np.einsum(\"a,hn->han\", np.ones_like(alphaset), noms)\n", " set_result = all_histos_deltas * all_histos_noms_repeated\n", " all_results.append(set_result)\n", " return all_results\n", @@ -1294,23 +1294,23 @@ " )\n", "\n", " bases_up = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_up\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_up\n", " )\n", " bases_dn = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_deltas_dn\n", " )\n", " exponents = np.einsum(\n", - " 'sa,shb->shab', np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", np.abs(alphasets), np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", "\n", " masks = np.einsum(\n", - " 'sa,shb->shab', alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", + " \"sa,shb->shab\", alphasets > 0, np.ones(allset_all_histo_deltas_up.shape)\n", " )\n", " bases = np.where(masks, bases_up, bases_dn)\n", "\n", " allsets_all_histos_deltas = np.power(bases, exponents)\n", " allsets_allhistos_noms_repeated = np.einsum(\n", - " 'sa,shb->shab', np.ones(alphasets.shape), allset_all_histo_nom\n", + " \"sa,shb->shab\", np.ones(alphasets.shape), allset_all_histo_nom\n", " )\n", " set_results = allsets_all_histos_deltas * allsets_allhistos_noms_repeated\n", " return set_results" 
diff --git a/docs/examples/notebooks/learn/UsingCalculators.ipynb b/docs/examples/notebooks/learn/UsingCalculators.ipynb index 409dbbdef3..63ede12989 100644 --- a/docs/examples/notebooks/learn/UsingCalculators.ipynb +++ b/docs/examples/notebooks/learn/UsingCalculators.ipynb @@ -60,8 +60,8 @@ ], "source": [ "CLs_obs, CLs_exp = pyhf.infer.hypotest(1.0, data, model, return_expected_set=True)\n", - "print(f'CLs_obs = {CLs_obs}')\n", - "print(f'CLs_exp = {CLs_exp}')" + "print(f\"CLs_obs = {CLs_obs}\")\n", + "print(f\"CLs_exp = {CLs_exp}\")" ] }, { @@ -97,7 +97,7 @@ "outputs": [], "source": [ "asymp_calc = pyhf.infer.calculators.AsymptoticCalculator(\n", - " data, model, test_stat='qtilde'\n", + " data, model, test_stat=\"qtilde\"\n", ")" ] }, @@ -123,7 +123,7 @@ ], "source": [ "teststat = asymp_calc.teststatistic(poi_test=1.0)\n", - "print(f'qtilde = {teststat}')" + "print(f\"qtilde = {teststat}\")" ] }, { @@ -169,9 +169,9 @@ "p_b = b_dist.pvalue(teststat)\n", "p_s = p_sb / p_b\n", "\n", - "print(f'CL_sb = {p_sb}')\n", - "print(f'CL_b = {p_b}')\n", - "print(f'CL_s = CL_sb / CL_b = {p_s}')" + "print(f\"CL_sb = {p_sb}\")\n", + "print(f\"CL_b = {p_b}\")\n", + "print(f\"CL_s = CL_sb / CL_b = {p_s}\")" ] }, { @@ -232,9 +232,9 @@ "source": [ "p_sb, p_b, p_s = asymp_calc.pvalues(teststat, sb_dist, b_dist)\n", "\n", - "print(f'CL_sb = {p_sb}')\n", - "print(f'CL_b = {p_b}')\n", - "print(f'CL_s = CL_sb / CL_b = {p_s}')" + "print(f\"CL_sb = {p_sb}\")\n", + "print(f\"CL_b = {p_b}\")\n", + "print(f\"CL_s = CL_sb / CL_b = {p_s}\")" ] }, { @@ -262,9 +262,9 @@ "source": [ "p_exp_sb, p_exp_b, p_exp_s = asymp_calc.expected_pvalues(sb_dist, b_dist)\n", "\n", - "print(f'exp. CL_sb = {p_exp_sb}')\n", - "print(f'exp. CL_b = {p_exp_b}')\n", - "print(f'exp. CL_s = CL_sb / CL_b = {p_exp_s}')" + "print(f\"exp. CL_sb = {p_exp_sb}\")\n", + "print(f\"exp. CL_b = {p_exp_b}\")\n", + "print(f\"exp. CL_s = CL_sb / CL_b = {p_exp_s}\")" ] }, { @@ -285,7 +285,7 @@ "outputs": [], "source": [ "toy_calc = pyhf.infer.calculators.ToyCalculator(\n", - " data, model, test_stat='qtilde', ntoys=500\n", + " data, model, test_stat=\"qtilde\", ntoys=500\n", ")" ] }, @@ -311,7 +311,7 @@ ], "source": [ "teststat = toy_calc.teststatistic(poi_test=1.0)\n", - "print(f'qtilde = {teststat}')" + "print(f\"qtilde = {teststat}\")" ] }, { @@ -388,9 +388,9 @@ "source": [ "p_sb, p_b, p_s = toy_calc.pvalues(teststat, sb_dist, b_dist)\n", "\n", - "print(f'CL_sb = {p_sb}')\n", - "print(f'CL_b = {p_b}')\n", - "print(f'CL_s = CL_sb / CL_b = {p_s}')" + "print(f\"CL_sb = {p_sb}\")\n", + "print(f\"CL_b = {p_b}\")\n", + "print(f\"CL_s = CL_sb / CL_b = {p_s}\")" ] }, { @@ -418,9 +418,9 @@ "source": [ "p_exp_sb, p_exp_b, p_exp_s = toy_calc.expected_pvalues(sb_dist, b_dist)\n", "\n", - "print(f'exp. CL_sb = {p_exp_sb}')\n", - "print(f'exp. CL_b = {p_exp_b}')\n", - "print(f'exp. CL_s = CL_sb / CL_b = {p_exp_s}')" + "print(f\"exp. CL_sb = {p_exp_sb}\")\n", + "print(f\"exp. CL_b = {p_exp_b}\")\n", + "print(f\"exp. 
CL_s = CL_sb / CL_b = {p_exp_s}\")" ] } ], diff --git a/docs/examples/notebooks/multiBinPois.ipynb b/docs/examples/notebooks/multiBinPois.ipynb index 57bf6733c8..5c9296cbca 100644 --- a/docs/examples/notebooks/multiBinPois.ipynb +++ b/docs/examples/notebooks/multiBinPois.ipynb @@ -52,9 +52,9 @@ " 0,\n", " yerr=errors,\n", " linewidth=0,\n", - " error_kw=dict(ecolor='k', elinewidth=1),\n", + " error_kw=dict(ecolor=\"k\", elinewidth=1),\n", " )\n", - " ax.scatter(bin_centers, data, c='k')" + " ax.scatter(bin_centers, data, c=\"k\")" ] }, { @@ -67,7 +67,7 @@ }, "outputs": [], "source": [ - "validation_datadir = '../../validation/data'" + "validation_datadir = \"../../validation/data\"" ] }, { @@ -87,9 +87,9 @@ "source": [ "source = json.load(open(validation_datadir + \"/1bin_example1.json\", encoding=\"utf-8\"))\n", "model = uncorrelated_background(\n", - " source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr']\n", + " source[\"bindata\"][\"sig\"], source[\"bindata\"][\"bkg\"], source[\"bindata\"][\"bkgerr\"]\n", ")\n", - "data = source['bindata']['data'] + model.config.auxdata\n", + "data = source[\"bindata\"][\"data\"] + model.config.auxdata\n", "\n", "init_pars = model.config.suggested_init()\n", "par_bounds = model.config.suggested_bounds()\n", @@ -132,8 +132,8 @@ "source": [ "fig, ax = plt.subplots(figsize=(10, 7))\n", "artists = brazil.plot_results(poi_tests, tests, test_size=0.05, ax=ax)\n", - "print(f'expected upper limits: {exp_limits}')\n", - "print(f'observed upper limit : {obs_limit}')" + "print(f\"expected upper limits: {exp_limits}\")\n", + "print(f\"observed upper limit : {obs_limit}\")" ] }, { @@ -161,15 +161,15 @@ "}\n", "\n", "\n", - "my_observed_counts = source['bindata']['data']\n", + "my_observed_counts = source[\"bindata\"][\"data\"]\n", "\n", "model = uncorrelated_background(\n", - " source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr']\n", + " source[\"bindata\"][\"sig\"], source[\"bindata\"][\"bkg\"], source[\"bindata\"][\"bkgerr\"]\n", ")\n", "data = my_observed_counts + model.config.auxdata\n", "\n", "\n", - "binning = source['binning']\n", + "binning = source[\"binning\"]\n", "\n", "nompars = model.config.suggested_init()\n", "\n", @@ -247,7 +247,7 @@ "def CLs(m1, m2):\n", " signal_counts = signal(m1, m2)\n", " pdf = uncorrelated_background(\n", - " signal_counts, source['bindata']['bkg'], source['bindata']['bkgerr']\n", + " signal_counts, source[\"bindata\"][\"bkg\"], source[\"bindata\"][\"bkgerr\"]\n", " )\n", " try:\n", " cls_obs, cls_exp_set = pyhf.infer.hypotest(\n", @@ -255,7 +255,7 @@ " )\n", " return cls_obs, cls_exp_set, True\n", " except AssertionError:\n", - " print(f'fit failed for mass points ({m1}, {m2})')\n", + " print(f\"fit failed for mass points ({m1}, {m2})\")\n", " return None, None, False" ] }, @@ -306,16 +306,16 @@ } ], "source": [ - "int_obs = griddata(X, yobs, (grid_x, grid_y), method='linear')\n", + "int_obs = griddata(X, yobs, (grid_x, grid_y), method=\"linear\")\n", "\n", - "int_exp = [griddata(X, yexp[i], (grid_x, grid_y), method='linear') for i in range(5)]\n", + "int_exp = [griddata(X, yexp[i], (grid_x, grid_y), method=\"linear\") for i in range(5)]\n", "\n", "plt.contourf(grid_x, grid_y, int_obs, levels=np.linspace(0, 1))\n", "plt.colorbar()\n", "\n", - "plt.contour(grid_x, grid_y, int_obs, levels=[0.05], colors='w')\n", + "plt.contour(grid_x, grid_y, int_obs, levels=[0.05], colors=\"w\")\n", "for level in int_exp:\n", - " plt.contour(grid_x, grid_y, level, levels=[0.05], colors='w', 
linestyles='dashed')\n", + " plt.contour(grid_x, grid_y, level, levels=[0.05], colors=\"w\", linestyles=\"dashed\")\n", "\n", "plt.scatter(X[:, 0], X[:, 1], c=yobs, vmin=0, vmax=1);" ] diff --git a/docs/examples/notebooks/multichannel-coupled-normsys.ipynb b/docs/examples/notebooks/multichannel-coupled-normsys.ipynb index 0abcbc7e16..af36f752a2 100644 --- a/docs/examples/notebooks/multichannel-coupled-normsys.ipynb +++ b/docs/examples/notebooks/multichannel-coupled-normsys.ipynb @@ -40,40 +40,40 @@ "\n", "def prep_data(sourcedata):\n", " spec = {\n", - " 'signal': {\n", - " 'signal': {\n", - " 'data': sourcedata['signal']['bindata']['sig'],\n", - " 'mods': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n", + " \"signal\": {\n", + " \"signal\": {\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"sig\"],\n", + " \"mods\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n", " },\n", - " 'bkg1': {\n", - " 'data': sourcedata['signal']['bindata']['bkg1'],\n", - " 'mods': [\n", + " \"bkg1\": {\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"bkg1\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'coupled_normsys',\n", - " 'type': 'normsys',\n", - " 'data': {'lo': 0.9, 'hi': 1.1},\n", + " \"name\": \"coupled_normsys\",\n", + " \"type\": \"normsys\",\n", + " \"data\": {\"lo\": 0.9, \"hi\": 1.1},\n", " }\n", " ],\n", " },\n", - " 'bkg2': {\n", - " 'data': sourcedata['signal']['bindata']['bkg2'],\n", - " 'mods': [\n", + " \"bkg2\": {\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"bkg2\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'coupled_normsys',\n", - " 'type': 'normsys',\n", - " 'data': {'lo': 0.5, 'hi': 1.5},\n", + " \"name\": \"coupled_normsys\",\n", + " \"type\": \"normsys\",\n", + " \"data\": {\"lo\": 0.5, \"hi\": 1.5},\n", " }\n", " ],\n", " },\n", " },\n", - " 'control': {\n", - " 'background': {\n", - " 'data': sourcedata['control']['bindata']['bkg1'],\n", - " 'mods': [\n", + " \"control\": {\n", + " \"background\": {\n", + " \"data\": sourcedata[\"control\"][\"bindata\"][\"bkg1\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'coupled_normsys',\n", - " 'type': 'normsys',\n", - " 'data': {'lo': 0.9, 'hi': 1.1},\n", + " \"name\": \"coupled_normsys\",\n", + " \"type\": \"normsys\",\n", + " \"data\": {\"lo\": 0.9, \"hi\": 1.1},\n", " }\n", " ],\n", " }\n", @@ -82,7 +82,7 @@ " pdf = Model(spec, poi_name=\"mu\")\n", " data = []\n", " for c in pdf.config.channel_order:\n", - " data += sourcedata[c]['bindata']['data']\n", + " data += sourcedata[c][\"bindata\"][\"data\"]\n", " data = data + pdf.config.auxdata\n", " return data, pdf" ] @@ -145,7 +145,7 @@ "}\n", "\n", "\n", - "d, pdf = prep_data(source['channels'])\n", + "d, pdf = prep_data(source[\"channels\"])\n", "\n", "print(d)\n", "\n", @@ -153,10 +153,10 @@ "par_bounds = pdf.config.suggested_bounds()\n", "\n", "unconpars = pyhf.unconstrained_bestfit(d, pdf, init_pars, par_bounds)\n", - "print('UNCON', unconpars)\n", + "print(\"UNCON\", unconpars)\n", "\n", "conpars = pyhf.constrained_bestfit(0.0, d, pdf, init_pars, par_bounds)\n", - "print('CONS', conpars)\n", + "print(\"CONS\", conpars)\n", "\n", "pdf.expected_data(conpars)" ] @@ -203,23 +203,23 @@ ], "source": [ "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, 
cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)\n", "\n", "\n", "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", diff --git a/docs/examples/notebooks/multichannel-normsys.ipynb b/docs/examples/notebooks/multichannel-normsys.ipynb index df65572e49..09c0e91661 100644 --- a/docs/examples/notebooks/multichannel-normsys.ipynb +++ b/docs/examples/notebooks/multichannel-normsys.ipynb @@ -38,30 +38,30 @@ "\n", "def prep_data(sourcedata):\n", " spec = {\n", - " 'signal': {\n", - " 'signal': {\n", - " 'data': sourcedata['signal']['bindata']['sig'],\n", - " 'mods': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n", + " \"signal\": {\n", + " \"signal\": {\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"sig\"],\n", + " \"mods\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n", " },\n", - " 'background': {\n", - " 'data': sourcedata['signal']['bindata']['bkg'],\n", - " 'mods': [\n", + " \"background\": {\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"bkg\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'uncorr_bkguncrt_signal',\n", - " 'type': 'shapesys',\n", - " 'data': sourcedata['signal']['bindata']['bkgerr'],\n", + " \"name\": \"uncorr_bkguncrt_signal\",\n", + " \"type\": \"shapesys\",\n", + " \"data\": sourcedata[\"signal\"][\"bindata\"][\"bkgerr\"],\n", " }\n", " ],\n", " },\n", " },\n", - " 'control': {\n", - " 'background': {\n", - " 'data': sourcedata['control']['bindata']['bkg'],\n", - " 'mods': [\n", + " \"control\": {\n", + " \"background\": {\n", + " \"data\": sourcedata[\"control\"][\"bindata\"][\"bkg\"],\n", + " \"mods\": [\n", " {\n", - " 'name': 'uncorr_bkguncrt_control',\n", - " 'type': 'shapesys',\n", - " 'data': sourcedata['control']['bindata']['bkgerr'],\n", + " \"name\": \"uncorr_bkguncrt_control\",\n", + " \"type\": \"shapesys\",\n", + " \"data\": sourcedata[\"control\"][\"bindata\"][\"bkgerr\"],\n", " }\n", " ],\n", " }\n", @@ -70,7 +70,7 @@ " pdf = Model(spec)\n", " data = []\n", " for c in pdf.config.channel_order:\n", - " data += sourcedata[c]['bindata']['data']\n", + " data += sourcedata[c][\"bindata\"][\"data\"]\n", " data = data + pdf.config.auxdata\n", " return data, pdf" ] @@ -125,7 +125,7 @@ " }\n", "}\n", "\n", - "d, pdf = prep_data(source['channels'])\n", + "d, pdf = prep_data(source[\"channels\"])\n", "\n", "print(d)\n", "\n", @@ -136,7 +136,7 @@ "print(pdf.pdf(init_pars, d))\n", "\n", "unconpars = pyhf.unconstrained_bestfit(d, pdf, init_pars, par_bounds)\n", - "print('UNCON', unconpars)\n", + "print(\"UNCON\", unconpars)\n", "\n", "\n", "# print d\n", @@ -144,7 +144,7 @@ "\n", "\n", "conpars = pyhf.constrained_bestfit(0.0, d, pdf, init_pars, par_bounds)\n", - "print('CONS', conpars)\n", + "print(\"CONS\", conpars)\n", "\n", "\n", "# print pdf.expected_data(conpars)\n", @@ -202,23 +202,23 @@ ], "source": [ "def plot_results(testmus, cls_obs, cls_exp, 
test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)\n", "\n", "\n", "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", diff --git a/docs/examples/notebooks/normsys.ipynb b/docs/examples/notebooks/normsys.ipynb index 056eeaa808..c4f58f3d59 100644 --- a/docs/examples/notebooks/normsys.ipynb +++ b/docs/examples/notebooks/normsys.ipynb @@ -36,18 +36,18 @@ "\n", "def prep_data(observed_counts, nominal_background, nominal_signals):\n", " spec = {\n", - " 'singlechannel': {\n", - " 'signal': {\n", - " 'data': nominal_signals,\n", - " 'mods': [{'name': 'mu', 'type': 'normfactor', 'data': None}],\n", + " \"singlechannel\": {\n", + " \"signal\": {\n", + " \"data\": nominal_signals,\n", + " \"mods\": [{\"name\": \"mu\", \"type\": \"normfactor\", \"data\": None}],\n", " },\n", - " 'background': {\n", - " 'data': nominal_background,\n", - " 'mods': [\n", + " \"background\": {\n", + " \"data\": nominal_background,\n", + " \"mods\": [\n", " {\n", - " 'name': 'bkg_norm',\n", - " 'type': 'normsys',\n", - " 'data': {'lo': 0.90, 'hi': 1.10},\n", + " \"name\": \"bkg_norm\",\n", + " \"type\": \"normsys\",\n", + " \"data\": {\"lo\": 0.90, \"hi\": 1.10},\n", " }\n", " ],\n", " },\n", @@ -87,18 +87,18 @@ " },\n", "}\n", "d, pdf = prep_data(\n", - " source['bindata']['data'], source['bindata']['bkg'], source['bindata']['sig']\n", + " source[\"bindata\"][\"data\"], source[\"bindata\"][\"bkg\"], source[\"bindata\"][\"sig\"]\n", ")\n", "\n", "init_pars = [0.0, 0.0]\n", "par_bounds = [[0, 10], [-5, 5]]\n", "\n", "unconpars = pyhf.unconstrained_bestfit(d, pdf, init_pars, par_bounds)\n", - "print('UNCON', unconpars)\n", + "print(\"UNCON\", unconpars)\n", "\n", "\n", "conpars = pyhf.constrained_bestfit(0.0, d, pdf, init_pars, par_bounds)\n", - "print('CONS', conpars)\n", + "print(\"CONS\", conpars)\n", "\n", "\n", "print(pdf.expected_data(conpars))\n", @@ -107,7 +107,7 @@ "aux = pdf.expected_auxdata(conpars)\n", "# print '????',aux\n", "\n", - "print('ASIMOV', pyhf.generate_asimov_data(0.0, d, pdf, init_pars, par_bounds))" + "print(\"ASIMOV\", pyhf.generate_asimov_data(0.0, d, pdf, init_pars, par_bounds))" ] }, { @@ -153,23 +153,23 @@ ], "source": [ "def plot_results(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " plt.plot(mutests, cls_obs, c='k')\n", - " for i, c in zip(range(5), ['grey', 'grey', 'grey', 'grey', 'grey']):\n", + " plt.plot(mutests, cls_obs, c=\"k\")\n", + " for i, c in zip(range(5), [\"grey\", \"grey\", \"grey\", \"grey\", \"grey\"]):\n", " plt.plot(mutests, cls_exp[i], c=c)\n", - " plt.plot(testmus, [test_size] * len(testmus), c='r')\n", + " 
plt.plot(testmus, [test_size] * len(testmus), c=\"r\")\n", " plt.ylim(0, 1)\n", "\n", "\n", "def invert_interval(testmus, cls_obs, cls_exp, test_size=0.05):\n", - " point05cross = {'exp': [], 'obs': None}\n", + " point05cross = {\"exp\": [], \"obs\": None}\n", " for cls_exp_sigma in cls_exp:\n", " yvals = [x for x in cls_exp_sigma]\n", - " point05cross['exp'].append(\n", + " point05cross[\"exp\"].append(\n", " np.interp(test_size, list(reversed(yvals)), list(reversed(testmus)))\n", " )\n", "\n", " yvals = cls_obs\n", - " point05cross['obs'] = np.interp(\n", + " point05cross[\"obs\"] = np.interp(\n", " test_size, list(reversed(yvals)), list(reversed(testmus))\n", " )\n", " return point05cross\n", diff --git a/docs/exts/xref.py b/docs/exts/xref.py index d61cb9e8f0..597595115e 100644 --- a/docs/exts/xref.py +++ b/docs/exts/xref.py @@ -10,7 +10,7 @@ def xref(typ, rawtext, text, lineno, inliner, options=None, content=None): title = target = text # titleistarget = True # look if explicit title and target are given with `foo ` syntax - brace = text.find('<') + brace = text.find("<") if brace != -1: # titleistarget = False m = caption_ref_re.match(text) @@ -37,6 +37,6 @@ def get_refs(app): def setup(app): - app.add_config_value('xref_links', {}, True) - app.add_role('xref', xref) + app.add_config_value("xref_links", {}, True) + app.add_role("xref", xref) app.connect("builder-inited", get_refs) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 68eb6db76e..77364d5d3c 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -39,10 +39,10 @@ def __dir__(): def __getattr__(name): - if name == 'tensorlib': + if name == "tensorlib": return get_backend(default=False)[0] - if name == 'optimizer': + if name == "optimizer": return get_backend(default=False)[1] - if name == 'default_backend': + if name == "default_backend": return get_backend(default=True)[0] raise AttributeError diff --git a/src/pyhf/cli/__init__.py b/src/pyhf/cli/__init__.py index f11dae7cc5..faacaf1dce 100644 --- a/src/pyhf/cli/__init__.py +++ b/src/pyhf/cli/__init__.py @@ -7,7 +7,7 @@ from pyhf.cli.spec import cli as spec from pyhf.contrib import cli as contrib -__all__ = ['cli', 'complete', 'contrib', 'infer', 'rootio', 'spec'] +__all__ = ["cli", "complete", "contrib", "infer", "rootio", "spec"] def __dir__(): diff --git a/src/pyhf/cli/cli.py b/src/pyhf/cli/cli.py index d4fb8c779d..946e85b3ed 100644 --- a/src/pyhf/cli/cli.py +++ b/src/pyhf/cli/cli.py @@ -19,7 +19,7 @@ def _print_citation(ctx, param, value): ctx.exit() -@click.group(context_settings=dict(help_option_names=['-h', '--help'])) +@click.group(context_settings=dict(help_option_names=["-h", "--help"])) @click.version_option(version=__version__) @click.option( "--cite", diff --git a/src/pyhf/cli/complete.py b/src/pyhf/cli/complete.py index c39e6e9784..aa377933a8 100644 --- a/src/pyhf/cli/complete.py +++ b/src/pyhf/cli/complete.py @@ -1,4 +1,4 @@ -'''Shell completions for pyhf.''' +"""Shell completions for pyhf.""" import click @@ -7,20 +7,20 @@ click_completion.init() - @click.command(help='Generate shell completion code.', name='completions') + @click.command(help="Generate shell completion code.", name="completions") @click.argument( - 'shell', + "shell", required=False, type=click_completion.DocumentedChoice(click_completion.core.shells), ) def cli(shell): - '''Generate shell completion code for various shells.''' - click.echo(click_completion.core.get_code(shell, prog_name='pyhf')) + """Generate shell completion code for various shells.""" + 
click.echo(click_completion.core.get_code(shell, prog_name="pyhf"))

 except ImportError:

-    @click.command(help='Generate shell completion code.', name='completions')
-    @click.argument('shell', default=None)
+    @click.command(help="Generate shell completion code.", name="completions")
+    @click.argument("shell", default=None)
     def cli(shell):
         """Placeholder for shell completion code generation function if necessary dependency is missing."""
         click.secho(
diff --git a/src/pyhf/cli/infer.py b/src/pyhf/cli/infer.py
index b3acb5ba8d..665c5acf5d 100644
--- a/src/pyhf/cli/infer.py
+++ b/src/pyhf/cli/infer.py
@@ -13,7 +13,7 @@
 log = logging.getLogger(__name__)


-@click.group(name='infer')
+@click.group(name="infer")
 def cli():
     """Inference CLI group."""

@@ -130,24 +130,24 @@ def fit(


 @cli.command()
-@click.argument('workspace', default='-')
+@click.argument("workspace", default="-")
 @click.option(
-    '--output-file',
-    help='The location of the output json file. If not specified, prints to screen.',
+    "--output-file",
+    help="The location of the output json file. If not specified, prints to screen.",
     default=None,
 )
-@click.option('--measurement', default=None)
-@click.option('-p', '--patch', multiple=True)
-@click.option('--test-poi', default=1.0)
-@click.option('--test-stat', type=click.Choice(['q', 'qtilde']), default='qtilde')
+@click.option("--measurement", default=None)
+@click.option("-p", "--patch", multiple=True)
+@click.option("--test-poi", default=1.0)
+@click.option("--test-stat", type=click.Choice(["q", "qtilde"]), default="qtilde")
 @click.option(
-    '--calctype', type=click.Choice(['asymptotics', 'toybased']), default='asymptotics'
+    "--calctype", type=click.Choice(["asymptotics", "toybased"]), default="asymptotics"
 )
 @click.option(
-    '--backend',
-    type=click.Choice(['numpy', 'jax', 'np']),
-    help='The tensor backend used for the calculation.',
-    default='numpy',
+    "--backend",
+    type=click.Choice(["numpy", "jax", "np"]),
+    help="The tensor backend used for the calculation.",
+    default="numpy",
 )
 @click.option(
     "--optimizer",
@@ -155,7 +155,7 @@ def fit(
     help="The optimizer used for the calculation.",
     default="scipy",
 )
-@click.option('--optconf', type=EqDelimStringParamType(), multiple=True)
+@click.option("--optconf", type=EqDelimStringParamType(), multiple=True)
 def cls(
     workspace,
     output_file,
@@ -202,8 +202,8 @@ def cls(
         measurement_name=measurement,
         patches=patches,
         modifier_settings={
-            'normsys': {'interpcode': 'code4'},
-            'histosys': {'interpcode': 'code4p'},
+            "normsys": {"interpcode": "code4"},
+            "histosys": {"interpcode": "code4p"},
         },
     )

@@ -219,7 +219,7 @@ def cls(
     # set the new optimizer
     if optimizer:
         new_optimizer = getattr(optimize, optimizer) or getattr(
-            optimize, f'{optimizer}_optimizer'
+            optimize, f"{optimizer}_optimizer"
         )

         set_backend(tensorlib, new_optimizer(**optconf))
@@ -232,8 +232,8 @@ def cls(
         return_expected_set=True,
     )
     result = {
-        'CLs_obs': tensorlib.tolist(result[0]),
-        'CLs_exp': [tensorlib.tolist(tensor) for tensor in result[-1]],
+        "CLs_obs": tensorlib.tolist(result[0]),
+        "CLs_exp": [tensorlib.tolist(tensor) for tensor in result[-1]],
     }

     if output_file is None:
diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py
index 5ba1deb4da..e0784acca4 100644
--- a/src/pyhf/cli/patchset.py
+++ b/src/pyhf/cli/patchset.py
@@ -12,21 +12,21 @@
 log = logging.getLogger(__name__)


-@click.group(name='patchset')
+@click.group(name="patchset")
 def cli():
     """Operations involving patchsets."""


 @cli.command()
-@click.argument('patchset', default='-')
-@click.option('--name', help='The name of the patch to extract.', default=None) +@click.argument("patchset", default="-") +@click.option("--name", help="The name of the patch to extract.", default=None) @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. If not specified, prints to screen.", default=None, ) @click.option( - '--with-metadata/--without-metadata', + "--with-metadata/--without-metadata", default=False, help="Include patchset metadata in output.", ) @@ -47,8 +47,8 @@ def extract(patchset, name, output_file, with_metadata): patch = patchset[name] if with_metadata: - result = {'metadata': patch.metadata, 'patch': patch.patch} - result['metadata'].update(patchset.metadata) + result = {"metadata": patch.metadata, "patch": patch.patch} + result["metadata"].update(patchset.metadata) else: result = patch.patch @@ -61,12 +61,12 @@ def extract(patchset, name, output_file, with_metadata): @cli.command() -@click.argument('background-only', default='-') -@click.argument('patchset', default='-') -@click.option('--name', help='The name of the patch to extract.', default=None) +@click.argument("background-only", default="-") +@click.argument("patchset", default="-") +@click.option("--name", help="The name of the patch to extract.", default=None) @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. If not specified, prints to screen.", default=None, ) def apply(background_only, patchset, name, output_file): @@ -100,8 +100,8 @@ def apply(background_only, patchset, name, output_file): @cli.command() -@click.argument('background-only', default='-') -@click.argument('patchset', default='-') +@click.argument("background-only", default="-") +@click.argument("patchset", default="-") def verify(background_only, patchset): """ Verify the patchset digests against a background-only workspace specification. Verified if no exception was raised. @@ -127,7 +127,7 @@ def verify(background_only, patchset): @cli.command() -@click.argument('patchset', default='-') +@click.argument("patchset", default="-") def inspect(patchset): """ Inspect the PatchSet (e.g. list individual patches). 
@@ -139,7 +139,7 @@ def inspect(patchset): patchset_spec = json.load(fstream) patchset = PatchSet(patchset_spec) - click.secho(f'\n {len(patchset.patches)} patches found in Patchset') - click.secho('---------------------------------\n') + click.secho(f"\n {len(patchset.patches)} patches found in Patchset") + click.secho("---------------------------------\n") for p in patchset.patches: click.echo(p.name) diff --git a/src/pyhf/cli/rootio.py b/src/pyhf/cli/rootio.py index 90d5c427eb..7880db1b81 100644 --- a/src/pyhf/cli/rootio.py +++ b/src/pyhf/cli/rootio.py @@ -13,34 +13,34 @@ log = logging.getLogger(__name__) -@click.group(name='rootio') +@click.group(name="rootio") def cli(): """ROOT I/O CLI group.""" @cli.command() -@click.argument('entrypoint-xml', type=click.Path(exists=True)) +@click.argument("entrypoint-xml", type=click.Path(exists=True)) @click.option( - '--basedir', - help='The base directory for the XML files to point relative to.', + "--basedir", + help="The base directory for the XML files to point relative to.", type=click.Path(exists=True), default=Path.cwd(), ) @click.option( - '-v', - '--mount', - help='Consists of two fields, separated by a colon character ( : ). The first field is the local path to where files are located, the second field is the path where the file or directory are saved in the XML configuration. This is similar in spirit to Docker.', + "-v", + "--mount", + help="Consists of two fields, separated by a colon character ( : ). The first field is the local path to where files are located, the second field is the path where the file or directory are saved in the XML configuration. This is similar in spirit to Docker.", type=VolumeMountPath(exists=True, resolve_path=True, path_type=Path), default=None, multiple=True, ) @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. 
If not specified, prints to screen.", default=None, ) -@click.option('--track-progress/--hide-progress', default=True) -@click.option('--validation-as-error/--validation-as-warning', default=True) +@click.option("--track-progress/--hide-progress", default=True) +@click.option("--validation-as-error/--validation-as-warning", default=True) def xml2json( entrypoint_xml, basedir, mount, output_file, track_progress, validation_as_error ): @@ -73,12 +73,12 @@ def xml2json( @cli.command() -@click.argument('workspace', default='-') -@click.option('--output-dir', type=click.Path(exists=True), default='.') -@click.option('--specroot', default='config') -@click.option('--dataroot', default='data') -@click.option('--resultprefix', default='FitConfig') -@click.option('-p', '--patch', multiple=True) +@click.argument("workspace", default="-") +@click.option("--output-dir", type=click.Path(exists=True), default=".") +@click.option("--specroot", default="config") +@click.option("--dataroot", default="data") +@click.option("--resultprefix", default="FitConfig") +@click.option("-p", "--patch", multiple=True) def json2xml(workspace, output_dir, specroot, dataroot, resultprefix, patch): """Convert pyhf JSON back to XML + ROOT files.""" try: @@ -110,5 +110,5 @@ def json2xml(workspace, output_dir, specroot, dataroot, resultprefix, patch): Path(output_dir).joinpath(specroot), Path(output_dir).joinpath(dataroot), resultprefix, - ).decode('utf-8') + ).decode("utf-8") ) diff --git a/src/pyhf/cli/spec.py b/src/pyhf/cli/spec.py index 7cc0430b00..f330327758 100644 --- a/src/pyhf/cli/spec.py +++ b/src/pyhf/cli/spec.py @@ -11,19 +11,19 @@ log = logging.getLogger(__name__) -@click.group(name='spec') +@click.group(name="spec") def cli(): """Spec CLI group.""" @cli.command() -@click.argument('workspace', default='-') +@click.argument("workspace", default="-") @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. If not specified, prints to screen.", default=None, ) -@click.option('--measurement', default=None) +@click.option("--measurement", default=None) def inspect(workspace, output_file, measurement): """ Inspect a pyhf JSON document. 
@@ -66,92 +66,92 @@ def inspect(workspace, output_file, measurement): default_measurement = ws.get_measurement() result = {} - result['samples'] = ws.samples - result['channels'] = [ + result["samples"] = ws.samples + result["channels"] = [ (channel, ws.channel_nbins[channel]) for channel in ws.channels ] - result['modifiers'] = dict(ws.modifiers) + result["modifiers"] = dict(ws.modifiers) parset_descr = { - parameters.paramsets.unconstrained: 'unconstrained', - parameters.paramsets.constrained_by_normal: 'constrained_by_normal', - parameters.paramsets.constrained_by_poisson: 'constrained_by_poisson', + parameters.paramsets.unconstrained: "unconstrained", + parameters.paramsets.constrained_by_normal: "constrained_by_normal", + parameters.paramsets.constrained_by_poisson: "constrained_by_poisson", } model = ws.model() - result['parameters'] = sorted( - (parset_name, parset_descr[type(parset_spec['paramset'])]) + result["parameters"] = sorted( + (parset_name, parset_descr[type(parset_spec["paramset"])]) for parset_name, parset_spec in model.config.par_map.items() ) - result['systematics'] = [ + result["systematics"] = [ ( parameter[0], parameter[1], [modifier[1] for modifier in ws.modifiers if modifier[0] == parameter[0]], ) - for parameter in result['parameters'] + for parameter in result["parameters"] ] - result['measurements'] = [ - (m['name'], m['config']['poi'], [p['name'] for p in m['config']['parameters']]) - for m in ws.get('measurements') + result["measurements"] = [ + (m["name"], m["config"]["poi"], [p["name"] for p in m["config"]["parameters"]]) + for m in ws.get("measurements") ] maxlen_channels = max(map(len, ws.channels)) maxlen_samples = max(map(len, ws.samples)) - maxlen_parameters = max(map(len, [p for p, _ in result['parameters']])) - maxlen_measurements = max(map(lambda x: len(x[0]), result['measurements'])) + maxlen_parameters = max(map(len, [p for p, _ in result["parameters"]])) + maxlen_measurements = max(map(lambda x: len(x[0]), result["measurements"])) maxlen = max( [maxlen_channels, maxlen_samples, maxlen_parameters, maxlen_measurements] ) # summary - fmtStr = '{{: >{:d}s}} {{:s}}'.format(maxlen + len('Summary')) - click.echo(fmtStr.format(' Summary ', '')) - click.echo(fmtStr.format('-' * 18, '')) - fmtStr = f'{{0: >{maxlen:d}s}} {{1:s}}' - for key in ['channels', 'samples', 'parameters', 'modifiers']: + fmtStr = "{{: >{:d}s}} {{:s}}".format(maxlen + len("Summary")) + click.echo(fmtStr.format(" Summary ", "")) + click.echo(fmtStr.format("-" * 18, "")) + fmtStr = f"{{0: >{maxlen:d}s}} {{1:s}}" + for key in ["channels", "samples", "parameters", "modifiers"]: click.echo(fmtStr.format(key, str(len(result[key])))) click.echo() - fmtStr = f'{{0: >{maxlen:d}s}} {{1: ^5s}}' - click.echo(fmtStr.format('channels', 'nbins')) - click.echo(fmtStr.format('-' * 10, '-' * 5)) - for channel, nbins in result['channels']: + fmtStr = f"{{0: >{maxlen:d}s}} {{1: ^5s}}" + click.echo(fmtStr.format("channels", "nbins")) + click.echo(fmtStr.format("-" * 10, "-" * 5)) + for channel, nbins in result["channels"]: click.echo(fmtStr.format(channel, str(nbins))) click.echo() - fmtStr = f'{{0: >{maxlen:d}s}}' - click.echo(fmtStr.format('samples')) - click.echo(fmtStr.format('-' * 10)) - for sample in result['samples']: + fmtStr = f"{{0: >{maxlen:d}s}}" + click.echo(fmtStr.format("samples")) + click.echo(fmtStr.format("-" * 10)) + for sample in result["samples"]: click.echo(fmtStr.format(sample)) click.echo() # print parameters, constraints, modifiers - fmtStr = f'{{0: >{maxlen:d}s}} {{1: <22s}} 
{{2:s}}' - click.echo(fmtStr.format('parameters', 'constraint', 'modifiers')) - click.echo(fmtStr.format('-' * 10, '-' * 10, '-' * 10)) - for parname, constraint, modtypes in result['systematics']: - click.echo(fmtStr.format(parname, constraint, ','.join(sorted(set(modtypes))))) + fmtStr = f"{{0: >{maxlen:d}s}} {{1: <22s}} {{2:s}}" + click.echo(fmtStr.format("parameters", "constraint", "modifiers")) + click.echo(fmtStr.format("-" * 10, "-" * 10, "-" * 10)) + for parname, constraint, modtypes in result["systematics"]: + click.echo(fmtStr.format(parname, constraint, ",".join(sorted(set(modtypes))))) click.echo() - fmtStr = f'{{0: >{maxlen:d}s}} {{1: ^22s}} {{2:s}}' - click.echo(fmtStr.format('measurement', 'poi', 'parameters')) - click.echo(fmtStr.format('-' * 10, '-' * 10, '-' * 10)) + fmtStr = f"{{0: >{maxlen:d}s}} {{1: ^22s}} {{2:s}}" + click.echo(fmtStr.format("measurement", "poi", "parameters")) + click.echo(fmtStr.format("-" * 10, "-" * 10, "-" * 10)) for measurement_name, measurement_poi, measurement_parameters in result[ - 'measurements' + "measurements" ]: click.echo( fmtStr.format( - ('(*) ' if measurement_name == default_measurement['name'] else '') + ("(*) " if measurement_name == default_measurement["name"] else "") + measurement_name, measurement_poi, ( - ','.join(measurement_parameters) + ",".join(measurement_parameters) if measurement_parameters - else '(none)' + else "(none)" ), ) ) @@ -165,23 +165,23 @@ def inspect(workspace, output_file, measurement): @cli.command() -@click.argument('workspace', default='-') +@click.argument("workspace", default="-") @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. If not specified, prints to screen.", default=None, ) -@click.option('-c', '--channel', default=[], multiple=True, metavar='...') -@click.option('-s', '--sample', default=[], multiple=True, metavar='...') -@click.option('-m', '--modifier', default=[], multiple=True, metavar='...') +@click.option("-c", "--channel", default=[], multiple=True, metavar="...") +@click.option("-s", "--sample", default=[], multiple=True, metavar="...") +@click.option("-m", "--modifier", default=[], multiple=True, metavar="...") @click.option( - '-t', - '--modifier-type', + "-t", + "--modifier-type", default=[], multiple=True, type=click.Choice(modifiers.histfactory_set), ) -@click.option('--measurement', default=[], multiple=True, metavar='...') +@click.option("--measurement", default=[], multiple=True, metavar="...") def prune( workspace, output_file, channel, sample, modifier, modifier_type, measurement ): @@ -211,42 +211,42 @@ def prune( @cli.command() -@click.argument('workspace', default='-') +@click.argument("workspace", default="-") @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. 
If not specified, prints to screen.", default=None, ) @click.option( - '-c', - '--channel', + "-c", + "--channel", default=[], multiple=True, type=click.Tuple([str, str]), - metavar=' ...', + metavar=" ...", ) @click.option( - '-s', - '--sample', + "-s", + "--sample", default=[], multiple=True, type=click.Tuple([str, str]), - metavar=' ...', + metavar=" ...", ) @click.option( - '-m', - '--modifier', + "-m", + "--modifier", default=[], multiple=True, type=click.Tuple([str, str]), - metavar=' ...', + metavar=" ...", ) @click.option( - '--measurement', + "--measurement", default=[], multiple=True, type=click.Tuple([str, str]), - metavar=' ...', + metavar=" ...", ) def rename(workspace, output_file, channel, sample, modifier, measurement): """ @@ -274,23 +274,23 @@ def rename(workspace, output_file, channel, sample, modifier, measurement): @cli.command() -@click.argument('workspace-one', default='-') -@click.argument('workspace-two', default='-') +@click.argument("workspace-one", default="-") +@click.argument("workspace-two", default="-") @click.option( - '-j', - '--join', - default='none', + "-j", + "--join", + default="none", type=click.Choice(Workspace.valid_joins), - help='The join operation to apply when combining the two workspaces.', + help="The join operation to apply when combining the two workspaces.", ) @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. If not specified, prints to screen.", default=None, ) @click.option( - '--merge-channels/--no-merge-channels', - help='Whether or not to deeply merge channels. Can only be done with left/right outer joins.', + "--merge-channels/--no-merge-channels", + help="Whether or not to deeply merge channels. Can only be done with left/right outer joins.", default=False, ) def combine(workspace_one, workspace_two, join, output_file, merge_channels): @@ -320,19 +320,19 @@ def combine(workspace_one, workspace_two, join, output_file, merge_channels): @cli.command() -@click.argument('workspace', default='-') +@click.argument("workspace", default="-") @click.option( - '-a', - '--algorithm', - default=['sha256'], + "-a", + "--algorithm", + default=["sha256"], multiple=True, - help='The hashing algorithm used to compute the workspace digest.', + help="The hashing algorithm used to compute the workspace digest.", ) @click.option( - '-j/-p', - '--json/--plaintext', - 'output_json', - help='Output the hash values as a JSON dictionary or plaintext strings', + "-j/-p", + "--json/--plaintext", + "output_json", + help="Output the hash values as a JSON dictionary or plaintext strings", ) def digest(workspace, algorithm, output_json): """ @@ -360,7 +360,7 @@ def digest(workspace, algorithm, output_json): if output_json: output = json.dumps(digests, indent=4, sort_keys=True) else: - output = '\n'.join( + output = "\n".join( f"{hash_alg}:{digest}" for hash_alg, digest in digests.items() ) @@ -368,10 +368,10 @@ def digest(workspace, algorithm, output_json): @cli.command() -@click.argument('workspace', default='-') +@click.argument("workspace", default="-") @click.option( - '--output-file', - help='The location of the output json file. If not specified, prints to screen.', + "--output-file", + help="The location of the output json file. 
If not specified, prints to screen.", default=None, ) def sort(workspace, output_file): diff --git a/src/pyhf/compat.py b/src/pyhf/compat.py index 95d57ba144..717157efff 100644 --- a/src/pyhf/compat.py +++ b/src/pyhf/compat.py @@ -44,13 +44,13 @@ def paramset_to_rootnames(paramset): ['gamma_uncorr_bkguncrt_0', 'gamma_uncorr_bkguncrt_1'] """ - if paramset.name == 'lumi': - return 'Lumi' + if paramset.name == "lumi": + return "Lumi" if paramset.is_scalar: if paramset.constrained: - return f'alpha_{paramset.name}' - return f'{paramset.name}' - return [f'gamma_{paramset.name}_{index}' for index in range(paramset.n_parameters)] + return f"alpha_{paramset.name}" + return f"{paramset.name}" + return [f"gamma_{paramset.name}_{index}" for index in range(paramset.n_parameters)] def interpret_rootname(rootname): @@ -88,33 +88,33 @@ def interpret_rootname(rootname): """ interpretation = { - 'constrained': 'n/a', - 'is_scalar': 'n/a', - 'name': 'n/a', - 'element': 'n/a', + "constrained": "n/a", + "is_scalar": "n/a", + "name": "n/a", + "element": "n/a", } - if rootname.startswith('gamma_'): - interpretation['is_scalar'] = False - match = re.search(r'^gamma_(.+)_(\d+)$', rootname) + if rootname.startswith("gamma_"): + interpretation["is_scalar"] = False + match = re.search(r"^gamma_(.+)_(\d+)$", rootname) if not match: - raise ValueError(f'confusing rootname {rootname}. Please report as a bug.') - interpretation['name'] = match.group(1) - interpretation['element'] = int(match.group(2)) + raise ValueError(f"confusing rootname {rootname}. Please report as a bug.") + interpretation["name"] = match.group(1) + interpretation["element"] = int(match.group(2)) else: - interpretation['is_scalar'] = True + interpretation["is_scalar"] = True - if rootname.startswith('alpha_'): - interpretation['constrained'] = True - match = re.search(r'^alpha_(.+)$', rootname) + if rootname.startswith("alpha_"): + interpretation["constrained"] = True + match = re.search(r"^alpha_(.+)$", rootname) if not match: - raise ValueError(f'confusing rootname {rootname}. Please report as a bug.') - interpretation['name'] = match.group(1) + raise ValueError(f"confusing rootname {rootname}. 
Please report as a bug.") + interpretation["name"] = match.group(1) - if not (rootname.startswith('alpha_') or rootname.startswith('gamma_')): - interpretation['constrained'] = False - interpretation['name'] = rootname + if not (rootname.startswith("alpha_") or rootname.startswith("gamma_")): + interpretation["constrained"] = False + interpretation["name"] = rootname - if rootname == 'Lumi': - interpretation['name'] = 'lumi' + if rootname == "Lumi": + interpretation["name"] = "lumi" return interpretation diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py index acd1b35af3..9386f119ef 100644 --- a/src/pyhf/constraints.py +++ b/src/pyhf/constraints.py @@ -24,7 +24,7 @@ def __init__(self, pdfconfig, batch_size=None): pars_constrained_by_normal = [ constrained_parameter for constrained_parameter in pdfconfig.auxdata_order - if pdfconfig.param_set(constrained_parameter).pdf_type == 'normal' + if pdfconfig.param_set(constrained_parameter).pdf_type == "normal" ] parfield_shape = (self.batch_size or 1, pdfconfig.npars) @@ -42,7 +42,7 @@ def __init__(self, pdfconfig, batch_size=None): end_index = start_index + parset.n_parameters thisauxdata = self.data_indices[start_index:end_index] start_index = end_index - if not parset.pdf_type == 'normal': + if not parset.pdf_type == "normal": continue normal_constraint_data.append(thisauxdata) @@ -64,7 +64,7 @@ def __init__(self, pdfconfig, batch_size=None): # start preparing constant tensors if self.param_viewer.index_selection: self._normal_data = default_backend.astensor( - default_backend.concatenate(normal_constraint_data), dtype='int' + default_backend.concatenate(normal_constraint_data), dtype="int" ) _normal_sigmas = default_backend.concatenate(normal_constraint_sigmas) @@ -80,15 +80,15 @@ def __init__(self, pdfconfig, batch_size=None): self._access_field = access_field self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: return tensorlib, _ = get_backend() self.sigmas = tensorlib.astensor(self._sigmas) - self.normal_data = tensorlib.astensor(self._normal_data, dtype='int') - self.access_field = tensorlib.astensor(self._access_field, dtype='int') + self.normal_data = tensorlib.astensor(self._normal_data, dtype="int") + self.access_field = tensorlib.astensor(self._access_field, dtype="int") def has_pdf(self): """ @@ -159,7 +159,7 @@ def __init__(self, pdfconfig, batch_size=None): pars_constrained_by_poisson = [ constrained_parameter for constrained_parameter in pdfconfig.auxdata_order - if pdfconfig.param_set(constrained_parameter).pdf_type == 'poisson' + if pdfconfig.param_set(constrained_parameter).pdf_type == "poisson" ] parfield_shape = (self.batch_size or 1, pdfconfig.npars) @@ -174,7 +174,7 @@ def __init__(self, pdfconfig, batch_size=None): end_index = start_index + parset.n_parameters thisauxdata = self.data_indices[start_index:end_index] start_index = end_index - if not parset.pdf_type == 'poisson': + if not parset.pdf_type == "poisson": continue poisson_constraint_data.append(thisauxdata) @@ -185,12 +185,12 @@ def __init__(self, pdfconfig, batch_size=None): self._batched_factors = None if self.param_viewer.index_selection: self._poisson_data = default_backend.astensor( - default_backend.concatenate(poisson_constraint_data), dtype='int' + default_backend.concatenate(poisson_constraint_data), dtype="int" ) _poisson_rate_fac = default_backend.astensor( 
default_backend.concatenate(poisson_constraint_rate_factors), - dtype='float', + dtype="float", ) factors = default_backend.reshape(_poisson_rate_fac, (1, -1)) self._batched_factors = default_backend.tile( @@ -203,14 +203,14 @@ def __init__(self, pdfconfig, batch_size=None): self._access_field = access_field self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: return tensorlib, _ = get_backend() - self.poisson_data = tensorlib.astensor(self._poisson_data, dtype='int') - self.access_field = tensorlib.astensor(self._access_field, dtype='int') + self.poisson_data = tensorlib.astensor(self._poisson_data, dtype="int") + self.access_field = tensorlib.astensor(self._access_field, dtype="int") self.batched_factors = tensorlib.astensor(self._batched_factors) def has_pdf(self): diff --git a/src/pyhf/events.py b/src/pyhf/events.py index ae55e2d4f1..a3b7022319 100644 --- a/src/pyhf/events.py +++ b/src/pyhf/events.py @@ -40,8 +40,9 @@ def append(self, callback): """ try: # methods - callback_ref = weakref.ref(callback.__func__), weakref.ref( - callback.__self__ + callback_ref = ( + weakref.ref(callback.__func__), + weakref.ref(callback.__self__), ) except AttributeError: callback_ref = weakref.ref(callback), None diff --git a/src/pyhf/exceptions/__init__.py b/src/pyhf/exceptions/__init__.py index 6d7e55f12f..9bf61e8e74 100644 --- a/src/pyhf/exceptions/__init__.py +++ b/src/pyhf/exceptions/__init__.py @@ -60,13 +60,13 @@ def __init__(self, ValidationError, schema=None): self.exc_info = sys.exc_info() self.parent = ValidationError self.schema = schema - self.path = '' + self.path = "" for item in ValidationError.path: if isinstance(item, int): - self.path += f'[{item}]' + self.path += f"[{item}]" else: - self.path += f'.{item}' - self.path = self.path.lstrip('.') + self.path += f".{item}" + self.path = self.path.lstrip(".") self.instance = ValidationError.instance message = f"{ValidationError.message}.\n\tPath: {self.path}\n\tInstance: {self.instance} Schema: {self.schema}" # Call the base class constructor with the parameters it needs @@ -173,7 +173,7 @@ class FailedMinimization(Exception): def __init__(self, result): self.result = result message = getattr( - result, 'message', "Unknown failure. See fit result for more details." + result, "message", "Unknown failure. See fit result for more details." ) super().__init__(message) diff --git a/src/pyhf/infer/__init__.py b/src/pyhf/infer/__init__.py index 3f43ae34fe..150f1fe34f 100644 --- a/src/pyhf/infer/__init__.py +++ b/src/pyhf/infer/__init__.py @@ -7,12 +7,12 @@ def _check_hypotest_prerequisites(pdf, data, init_pars, par_bounds, fixed_params): if pdf.config.poi_index is None: raise exceptions.UnspecifiedPOI( - 'No POI is defined. A POI is required to run a hypothesis test.' + "No POI is defined. A POI is required to run a hypothesis test." ) if not utils.all_pois_floating(pdf, fixed_params): raise exceptions.InvalidModel( - f'POI at index [{pdf.config.poi_index}] is set as fixed, which makes profile likelihood ratio based inference impossible. Please unfix the POI to continue.' + f"POI at index [{pdf.config.poi_index}] is set as fixed, which makes profile likelihood ratio based inference impossible. Please unfix the POI to continue." 
) @@ -179,7 +179,7 @@ def hypotest( sig_plus_bkg_distribution, bkg_only_distribution ) - is_q0 = kwargs.get('test_stat', 'qtilde') == 'q0' + is_q0 = kwargs.get("test_stat", "qtilde") == "q0" _returns = [CLsb_obs if is_q0 else CLs_obs] if return_tail_probs: diff --git a/src/pyhf/infer/calculators.py b/src/pyhf/infer/calculators.py index ff5f59026a..c830ea7371 100644 --- a/src/pyhf/infer/calculators.py +++ b/src/pyhf/infer/calculators.py @@ -213,11 +213,11 @@ class HypoTestFitResults: """ # ignore "F821 undefined name 'Tensor'" so as to avoid typing.Any - asimov_pars: 'Tensor' # noqa: F821 - free_fit_to_data: 'Tensor' # noqa: F821 - free_fit_to_asimov: 'Tensor' # noqa: F821 - fixed_poi_fit_to_data: 'Tensor' # noqa: F821 - fixed_poi_fit_to_asimov: 'Tensor' # noqa: F821 + asimov_pars: "Tensor" # noqa: F821 + free_fit_to_data: "Tensor" # noqa: F821 + free_fit_to_asimov: "Tensor" # noqa: F821 + fixed_poi_fit_to_data: "Tensor" # noqa: F821 + fixed_poi_fit_to_asimov: "Tensor" # noqa: F821 class AsymptoticCalculator: @@ -377,7 +377,7 @@ def teststatistic(self, poi_test): ) sqrtqmu_v = tensorlib.sqrt(qmu_v) - asimov_mu = 1.0 if self.test_stat == 'q0' else 0.0 + asimov_mu = 1.0 if self.test_stat == "q0" else 0.0 asimov_data, asimov_mubhathat = generate_asimov_data( asimov_mu, @@ -770,7 +770,7 @@ def distributions(self, poi_test, track_progress=None): signal_sample = signal_pdf.sample(sample_shape) bkg_pars = fixed_poi_fit( - 1.0 if self.test_stat == 'q0' else 0.0, + 1.0 if self.test_stat == "q0" else 0.0, self.data, self.pdf, self.init_pars, @@ -788,11 +788,11 @@ def distributions(self, poi_test, track_progress=None): disable=not ( track_progress if track_progress is not None else self.track_progress ), - unit='toy', + unit="toy", ) signal_teststat = [] - for sample in tqdm.tqdm(signal_sample, **tqdm_options, desc='Signal-like'): + for sample in tqdm.tqdm(signal_sample, **tqdm_options, desc="Signal-like"): signal_teststat.append( teststat_func( poi_test, @@ -805,7 +805,7 @@ def distributions(self, poi_test, track_progress=None): ) bkg_teststat = [] - for sample in tqdm.tqdm(bkg_sample, **tqdm_options, desc='Background-like'): + for sample in tqdm.tqdm(bkg_sample, **tqdm_options, desc="Background-like"): bkg_teststat.append( teststat_func( poi_test, diff --git a/src/pyhf/infer/mle.py b/src/pyhf/infer/mle.py index c269eb47c8..3102836f50 100644 --- a/src/pyhf/infer/mle.py +++ b/src/pyhf/infer/mle.py @@ -191,7 +191,7 @@ def fixed_poi_fit( """ if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required to fit with a fixed POI.' + "No POI is defined. A POI is required to fit with a fixed POI." ) init_pars = [*(init_pars or pdf.config.suggested_init())] diff --git a/src/pyhf/infer/test_statistics.py b/src/pyhf/infer/test_statistics.py index f9940c678e..060535dee0 100644 --- a/src/pyhf/infer/test_statistics.py +++ b/src/pyhf/infer/test_statistics.py @@ -129,12 +129,12 @@ def qmu(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=F """ if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required for profile likelihood based test statistics.' + "No POI is defined. A POI is required for profile likelihood based test statistics." 
) if par_bounds[pdf.config.poi_index][0] == 0: log.warning( - 'qmu test statistic used for fit configuration with POI bounded at zero.\n' - + 'Use the qmu_tilde test statistic (pyhf.infer.test_statistics.qmu_tilde) instead.\n' + "qmu test statistic used for fit configuration with POI bounded at zero.\n" + + "Use the qmu_tilde test statistic (pyhf.infer.test_statistics.qmu_tilde) instead.\n" + 'If you called this from pyhf.infer.mle or pyhf.infer.hypotest, set test_stat="qtilde".' ) return _qmu_like( @@ -225,12 +225,12 @@ def qmu_tilde( """ if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required for profile likelihood based test statistics.' + "No POI is defined. A POI is required for profile likelihood based test statistics." ) if par_bounds[pdf.config.poi_index][0] != 0: log.warning( - 'qmu_tilde test statistic used for fit configuration with POI not bounded at zero.\n' - + 'Use the qmu test statistic (pyhf.infer.test_statistics.qmu) instead.\n' + "qmu_tilde test statistic used for fit configuration with POI not bounded at zero.\n" + + "Use the qmu test statistic (pyhf.infer.test_statistics.qmu) instead.\n" + 'If you called this from pyhf.infer.mle or pyhf.infer.hypotest, set test_stat="q".' ) return _qmu_like( @@ -308,12 +308,12 @@ def tmu(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=F """ if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required for profile likelihood based test statistics.' + "No POI is defined. A POI is required for profile likelihood based test statistics." ) if par_bounds[pdf.config.poi_index][0] == 0: log.warning( - 'tmu test statistic used for fit configuration with POI bounded at zero.\n' - + 'Use the tmu_tilde test statistic (pyhf.infer.test_statistics.tmu_tilde) instead.' + "tmu test statistic used for fit configuration with POI bounded at zero.\n" + + "Use the tmu_tilde test statistic (pyhf.infer.test_statistics.tmu_tilde) instead." ) return _tmu_like( mu, @@ -398,12 +398,12 @@ def tmu_tilde( """ if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required for profile likelihood based test statistics.' + "No POI is defined. A POI is required for profile likelihood based test statistics." ) if par_bounds[pdf.config.poi_index][0] != 0: log.warning( - 'tmu_tilde test statistic used for fit configuration with POI not bounded at zero.\n' - + 'Use the tmu test statistic (pyhf.infer.test_statistics.tmu) instead.' + "tmu_tilde test statistic used for fit configuration with POI not bounded at zero.\n" + + "Use the tmu test statistic (pyhf.infer.test_statistics.tmu) instead." ) return _tmu_like( mu, @@ -477,11 +477,11 @@ def q0(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=Fa if pdf.config.poi_index is None: raise UnspecifiedPOI( - 'No POI is defined. A POI is required for profile likelihood based test statistics.' + "No POI is defined. A POI is required for profile likelihood based test statistics." ) if mu != 0.0: log.warning( - 'q0 test statistic only used for fit configuration with POI set to zero. Setting mu=0.' + "q0 test statistic only used for fit configuration with POI set to zero. Setting mu=0." ) mu = 0.0 diff --git a/src/pyhf/infer/utils.py b/src/pyhf/infer/utils.py index 27437b110c..96037b8e6a 100644 --- a/src/pyhf/infer/utils.py +++ b/src/pyhf/infer/utils.py @@ -66,7 +66,7 @@ def create_calculator(calctype, *args, **kwargs): Returns: calculator (:obj:`object`): A calculator. 
""" - return {'asymptotics': AsymptoticCalculator, 'toybased': ToyCalculator}[calctype]( + return {"asymptotics": AsymptoticCalculator, "toybased": ToyCalculator}[calctype]( *args, **kwargs ) diff --git a/src/pyhf/interpolators/__init__.py b/src/pyhf/interpolators/__init__.py index 5c451fec3a..8e65c6ef4e 100644 --- a/src/pyhf/interpolators/__init__.py +++ b/src/pyhf/interpolators/__init__.py @@ -33,7 +33,7 @@ def get(interpcode, do_tensorized_calc=True): 1: code1 if do_tensorized_calc else _slow_code1, 2: code2 if do_tensorized_calc else _slow_code2, 4: code4 if do_tensorized_calc else _slow_code4, - '4p': code4p if do_tensorized_calc else _slow_code4p, + "4p": code4p if do_tensorized_calc else _slow_code4p, } try: @@ -42,7 +42,7 @@ def get(interpcode, do_tensorized_calc=True): raise exceptions.InvalidInterpCode -__all__ = ['code0', 'code1', 'code2', 'code4', 'code4p'] +__all__ = ["code0", "code1", "code2", "code4", "code4p"] def __dir__(): diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py index 554dec670e..9e99c7e5e9 100644 --- a/src/pyhf/interpolators/code0.py +++ b/src/pyhf/interpolators/code0.py @@ -41,7 +41,7 @@ def __init__(self, histogramssets, subscribe=True): ) self._precompute() if subscribe: - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -72,15 +72,15 @@ def __call__(self, alphasets): # h: histogram affected by modifier # b: bin of histogram alphas_times_deltas_up = tensorlib.einsum( - 'sa,shb->shab', alphasets, self.deltas_up + "sa,shb->shab", alphasets, self.deltas_up ) alphas_times_deltas_dn = tensorlib.einsum( - 'sa,shb->shab', alphasets, self.deltas_dn + "sa,shb->shab", alphasets, self.deltas_dn ) masks = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_positive, self.broadcast_helper + "sa,shb->shab", where_alphasets_positive, self.broadcast_helper ), dtype="bool", ) diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py index 9c70675b73..2793924cb3 100644 --- a/src/pyhf/interpolators/code1.py +++ b/src/pyhf/interpolators/code1.py @@ -47,7 +47,7 @@ def __init__(self, histogramssets, subscribe=True): self._precompute() if subscribe: - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -55,10 +55,10 @@ def _precompute(self): self.deltas_dn = tensorlib.astensor(self._deltas_dn) self.broadcast_helper = tensorlib.astensor(self._broadcast_helper) self.bases_up = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_up + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_up ) self.bases_dn = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_dn + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_dn ) self.mask_on = tensorlib.ones(self.alphasets_shape) self.mask_off = tensorlib.zeros(self.alphasets_shape) @@ -69,10 +69,10 @@ def _precompute_alphasets(self, alphasets_shape): tensorlib, _ = get_backend() self.alphasets_shape = alphasets_shape self.bases_up = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_up + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_up ) self.bases_dn = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_dn + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), 
self.deltas_dn ) self.mask_on = tensorlib.ones(self.alphasets_shape) self.mask_off = tensorlib.zeros(self.alphasets_shape) @@ -91,11 +91,11 @@ def __call__(self, alphasets): # h: histogram affected by modifier # b: bin of histogram exponents = tensorlib.einsum( - 'sa,shb->shab', tensorlib.abs(alphasets), self.broadcast_helper + "sa,shb->shab", tensorlib.abs(alphasets), self.broadcast_helper ) masks = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_positive, self.broadcast_helper + "sa,shb->shab", where_alphasets_positive, self.broadcast_helper ), dtype="bool", ) diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py index e4cb073088..95cb1a435d 100644 --- a/src/pyhf/interpolators/code2.py +++ b/src/pyhf/interpolators/code2.py @@ -48,7 +48,7 @@ def __init__(self, histogramssets, subscribe=True): self._broadcast_helper = default_backend.ones(default_backend.shape(self._a)) self._precompute() if subscribe: - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -89,24 +89,24 @@ def __call__(self, alphasets): # h: histogram affected by modifier # b: bin of histogram value_gt1 = tensorlib.einsum( - 'sa,shb->shab', alphasets - self.mask_on, self.b_plus_2a + "sa,shb->shab", alphasets - self.mask_on, self.b_plus_2a ) value_btwn = tensorlib.einsum( - 'sa,sa,shb->shab', alphasets, alphasets, self.a - ) + tensorlib.einsum('sa,shb->shab', alphasets, self.b) + "sa,sa,shb->shab", alphasets, alphasets, self.a + ) + tensorlib.einsum("sa,shb->shab", alphasets, self.b) value_lt1 = tensorlib.einsum( - 'sa,shb->shab', alphasets + self.mask_off, self.b_minus_2a + "sa,shb->shab", alphasets + self.mask_off, self.b_minus_2a ) masks_gt1 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_gt1, self.broadcast_helper + "sa,shb->shab", where_alphasets_gt1, self.broadcast_helper ), dtype="bool", ) masks_not_lt1 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_not_lt1, self.broadcast_helper + "sa,shb->shab", where_alphasets_not_lt1, self.broadcast_helper ), dtype="bool", ) diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py index 5d382133e5..d155691a92 100644 --- a/src/pyhf/interpolators/code4.py +++ b/src/pyhf/interpolators/code4.py @@ -125,12 +125,12 @@ def __init__(self, histogramssets, subscribe=True, alpha0=1): ] ) self._coefficients = default_backend.einsum( - 'rc,shb,cshb->rshb', A_inverse, self._broadcast_helper, b + "rc,shb,cshb->rshb", A_inverse, self._broadcast_helper, b ) self._precompute() if subscribe: - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -140,15 +140,15 @@ def _precompute(self): self.alpha0 = tensorlib.astensor(self._alpha0) self.coefficients = tensorlib.astensor(self._coefficients) self.bases_up = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_up + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_up ) self.bases_dn = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_dn + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_dn ) self.mask_on = tensorlib.ones(self.alphasets_shape) self.mask_off = tensorlib.zeros(self.alphasets_shape) self.ones = tensorlib.einsum( - 'sa,shb->shab', self.mask_on, self.broadcast_helper + "sa,shb->shab", 
self.mask_on, self.broadcast_helper ) def _precompute_alphasets(self, alphasets_shape): @@ -157,15 +157,15 @@ def _precompute_alphasets(self, alphasets_shape): tensorlib, _ = get_backend() self.alphasets_shape = alphasets_shape self.bases_up = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_up + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_up ) self.bases_dn = tensorlib.einsum( - 'sa,shb->shab', tensorlib.ones(self.alphasets_shape), self.deltas_dn + "sa,shb->shab", tensorlib.ones(self.alphasets_shape), self.deltas_dn ) self.mask_on = tensorlib.ones(self.alphasets_shape) self.mask_off = tensorlib.zeros(self.alphasets_shape) self.ones = tensorlib.einsum( - 'sa,shb->shab', self.mask_on, self.broadcast_helper + "sa,shb->shab", self.mask_on, self.broadcast_helper ) return @@ -180,7 +180,7 @@ def __call__(self, alphasets): ) masks_gtalpha0 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_gtalpha0, self.broadcast_helper + "sa,shb->shab", where_alphasets_gtalpha0, self.broadcast_helper ), dtype="bool", ) @@ -191,7 +191,7 @@ def __call__(self, alphasets): ) masks_not_ltalpha0 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_not_ltalpha0, self.broadcast_helper + "sa,shb->shab", where_alphasets_not_ltalpha0, self.broadcast_helper ), dtype="bool", ) @@ -201,7 +201,7 @@ def __call__(self, alphasets): # h: histogram affected by modifier # b: bin of histogram exponents = tensorlib.einsum( - 'sa,shb->shab', tensorlib.abs(alphasets), self.broadcast_helper + "sa,shb->shab", tensorlib.abs(alphasets), self.broadcast_helper ) # for |alpha| >= alpha0, we want to raise the bases to the exponent=alpha # and for |alpha| < alpha0, we want to raise the bases to the exponent=1 @@ -221,7 +221,7 @@ def __call__(self, alphasets): ) # this is the 1 + sum_i a_i alpha^i value_btwn = tensorlib.ones(exponents.shape) + tensorlib.einsum( - 'rshb,rsa->shab', self.coefficients, alphasets_powers + "rshb,rsa->shab", self.coefficients, alphasets_powers ) # first, build a result where: diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py index a3d292845e..c8b7738412 100644 --- a/src/pyhf/interpolators/code4p.py +++ b/src/pyhf/interpolators/code4p.py @@ -34,7 +34,7 @@ def __init__(self, histogramssets, subscribe=True): ) self._precompute() if subscribe: - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -75,12 +75,12 @@ def __call__(self, alphasets): # for a > 1 alphas_times_deltas_up = tensorlib.einsum( - 'sa,shb->shab', alphasets, self.deltas_up + "sa,shb->shab", alphasets, self.deltas_up ) # for a < -1 alphas_times_deltas_dn = tensorlib.einsum( - 'sa,shb->shab', alphasets, self.deltas_dn + "sa,shb->shab", alphasets, self.deltas_dn ) # for |a| < 1 @@ -89,25 +89,25 @@ def __call__(self, alphasets): tmp2 = asquare * tmp1 + 15.0 tmp3 = asquare * tmp2 - tmp3_times_A = tensorlib.einsum('sa,shb->shab', tmp3, self.A) + tmp3_times_A = tensorlib.einsum("sa,shb->shab", tmp3, self.A) - alphas_times_S = tensorlib.einsum('sa,shb->shab', alphasets, self.S) + alphas_times_S = tensorlib.einsum("sa,shb->shab", alphasets, self.S) deltas = tmp3_times_A + alphas_times_S # end |a| < 1 masks_p1 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_greater_p1, self.broadcast_helper + "sa,shb->shab", where_alphasets_greater_p1, self.broadcast_helper ), - dtype='bool', + dtype="bool", 
) masks_m1 = tensorlib.astensor( tensorlib.einsum( - 'sa,shb->shab', where_alphasets_smaller_m1, self.broadcast_helper + "sa,shb->shab", where_alphasets_smaller_m1, self.broadcast_helper ), - dtype='bool', + dtype="bool", ) return tensorlib.where( diff --git a/src/pyhf/mixins.py b/src/pyhf/mixins.py index 07667ade30..6846f523e1 100644 --- a/src/pyhf/mixins.py +++ b/src/pyhf/mixins.py @@ -20,7 +20,7 @@ class _ChannelSummaryMixin: """ def __init__(self, *args: Any, **kwargs: Sequence[Channel]): - channels = kwargs.pop('channels') + channels = kwargs.pop("channels") super().__init__(*args, **kwargs) self._channels: list[str] = [] self._samples: list[str] = [] @@ -30,15 +30,15 @@ def __init__(self, *args: Any, **kwargs: Sequence[Channel]): # need to keep track in which order we added the constraints # so that we can generate correctly-ordered data for channel in channels: - self._channels.append(channel['name']) - self._channel_nbins[channel['name']] = len(channel['samples'][0]['data']) - for sample in channel['samples']: - self._samples.append(sample['name']) - for modifier_def in sample['modifiers']: + self._channels.append(channel["name"]) + self._channel_nbins[channel["name"]] = len(channel["samples"][0]["data"]) + for sample in channel["samples"]: + self._samples.append(sample["name"]) + for modifier_def in sample["modifiers"]: self._modifiers.append( ( - modifier_def['name'], # mod name - modifier_def['type'], # mod type + modifier_def["name"], # mod name + modifier_def["type"], # mod type ) ) diff --git a/src/pyhf/modifiers/histosys.py b/src/pyhf/modifiers/histosys.py index c76170e36b..a7e35913f8 100644 --- a/src/pyhf/modifiers/histosys.py +++ b/src/pyhf/modifiers/histosys.py @@ -11,13 +11,13 @@ def required_parset(sample_data, modifier_data): return { - 'paramset_type': 'constrained_by_normal', - 'n_parameters': 1, - 'is_scalar': True, - 'inits': (0.0,), - 'bounds': ((-5.0, 5.0),), - 'fixed': False, - 'auxdata': (0.0,), + "paramset_type": "constrained_by_normal", + "n_parameters": 1, + "is_scalar": True, + "inits": (0.0,), + "bounds": ((-5.0, 5.0),), + "fixed": False, + "auxdata": (0.0,), } @@ -32,31 +32,31 @@ def __init__(self, config): self.required_parsets = {} def collect(self, thismod, nom): - lo_data = thismod['data']['lo_data'] if thismod else nom - hi_data = thismod['data']['hi_data'] if thismod else nom + lo_data = thismod["data"]["lo_data"] if thismod else nom + hi_data = thismod["data"]["hi_data"] if thismod else nom maskval = bool(thismod) mask = [maskval] * len(nom) - return {'lo_data': lo_data, 'hi_data': hi_data, 'mask': mask, 'nom_data': nom} + return {"lo_data": lo_data, "hi_data": hi_data, "mask": mask, "nom_data": nom} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'hi_data': [], 'lo_data': [], 'nom_data': [], 'mask': []} + "data", {"hi_data": [], "lo_data": [], "nom_data": [], "mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['lo_data'].append(moddata['lo_data']) - self.builder_data[key][sample]['data']['hi_data'].append(moddata['hi_data']) - self.builder_data[key][sample]['data']['nom_data'].append(moddata['nom_data']) - self.builder_data[key][sample]['data']['mask'].append(moddata['mask']) + self.builder_data[key][sample]["data"]["lo_data"].append(moddata["lo_data"]) + 
self.builder_data[key][sample]["data"]["hi_data"].append(moddata["hi_data"]) + self.builder_data[key][sample]["data"]["nom_data"].append(moddata["nom_data"]) + self.builder_data[key][sample]["data"]["mask"].append(moddata["mask"]) if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -95,17 +95,17 @@ def finalize(self): class histosys_combined: - name = 'histosys' - op_code = 'addition' + name = "histosys" + op_code = "addition" def __init__( - self, modifiers, pdfconfig, builder_data, interpcode='code0', batch_size=None + self, modifiers, pdfconfig, builder_data, interpcode="code0", batch_size=None ): self.batch_size = batch_size self.interpcode = interpcode - assert self.interpcode in ['code0', 'code2', 'code4p'] + assert self.interpcode in ["code0", "code2", "code4p"] - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] histosys_mods = [m for m, _ in modifiers] parfield_shape = ( @@ -120,16 +120,16 @@ def __init__( self._histosys_histoset = [ [ [ - builder_data[m][s]['data']['lo_data'], - builder_data[m][s]['data']['nom_data'], - builder_data[m][s]['data']['hi_data'], + builder_data[m][s]["data"]["lo_data"], + builder_data[m][s]["data"]["nom_data"], + builder_data[m][s]["data"]["hi_data"], ] for s in pdfconfig.samples ] for m in keys ] self._histosys_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] @@ -139,7 +139,7 @@ def __init__( ) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: diff --git a/src/pyhf/modifiers/lumi.py b/src/pyhf/modifiers/lumi.py index 854534d7f9..a655c7c1fa 100644 --- a/src/pyhf/modifiers/lumi.py +++ b/src/pyhf/modifiers/lumi.py @@ -8,14 +8,14 @@ def required_parset(sample_data, modifier_data): return { - 'paramset_type': 'constrained_by_normal', - 'n_parameters': 1, - 'is_scalar': True, - 'inits': None, # lumi - 'bounds': None, # (0, 10*lumi) - 'fixed': False, - 'auxdata': None, # lumi - 'sigmas': None, # lumi * lumirelerror + "paramset_type": "constrained_by_normal", + "n_parameters": 1, + "is_scalar": True, + "inits": None, # lumi + "bounds": None, # (0, 10*lumi) + "fixed": False, + "auxdata": None, # lumi + "sigmas": None, # lumi * lumirelerror } @@ -32,23 +32,23 @@ def __init__(self, config): def collect(self, thismod, nom): maskval = True if thismod else False mask = [maskval] * len(nom) - return {'mask': mask} + return {"mask": mask} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'mask': []} + "data", {"mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['mask'] += moddata['mask'] + self.builder_data[key][sample]["data"]["mask"] += moddata["mask"] if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -56,13 +56,13 @@ def finalize(self): class 
lumi_combined: - name = 'lumi' - op_code = 'multiplication' + name = "lumi" + op_code = "multiplication" def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): self.batch_size = batch_size - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] lumi_mods = [m for m, _ in modifiers] parfield_shape = ( @@ -73,11 +73,11 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): self.param_viewer = ParamViewer(parfield_shape, pdfconfig.par_map, lumi_mods) self._lumi_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: @@ -100,8 +100,8 @@ def apply(self, pars): tensorlib, _ = get_backend() lumis = self.param_viewer.get(pars) if self.batch_size is None: - results_lumi = tensorlib.einsum('msab,x->msab', self.lumi_mask, lumis) + results_lumi = tensorlib.einsum("msab,x->msab", self.lumi_mask, lumis) else: - results_lumi = tensorlib.einsum('msab,xa->msab', self.lumi_mask, lumis) + results_lumi = tensorlib.einsum("msab,xa->msab", self.lumi_mask, lumis) return tensorlib.where(self.lumi_mask_bool, results_lumi, self.lumi_default) diff --git a/src/pyhf/modifiers/normfactor.py b/src/pyhf/modifiers/normfactor.py index 6d8cbd5852..3f2e9bd24e 100644 --- a/src/pyhf/modifiers/normfactor.py +++ b/src/pyhf/modifiers/normfactor.py @@ -8,12 +8,12 @@ def required_parset(sample_data, modifier_data): return { - 'paramset_type': 'unconstrained', - 'n_parameters': 1, - 'is_scalar': True, - 'inits': (1.0,), - 'bounds': ((0, 10),), - 'fixed': False, + "paramset_type": "unconstrained", + "n_parameters": 1, + "is_scalar": True, + "inits": (1.0,), + "bounds": ((0, 10),), + "fixed": False, } @@ -30,23 +30,23 @@ def __init__(self, config): def collect(self, thismod, nom): maskval = True if thismod else False mask = [maskval] * len(nom) - return {'mask': mask} + return {"mask": mask} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'mask': []} + "data", {"mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['mask'] += moddata['mask'] + self.builder_data[key][sample]["data"]["mask"] += moddata["mask"] if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -54,13 +54,13 @@ def finalize(self): class normfactor_combined: - name = 'normfactor' - op_code = 'multiplication' + name = "normfactor" + op_code = "multiplication" def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): self.batch_size = batch_size - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] normfactor_mods = [m for m, _ in modifiers] parfield_shape = ( @@ -73,11 +73,11 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): ) self._normfactor_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in 
pdfconfig.samples] for m in keys ] self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -102,12 +102,12 @@ def apply(self, pars): if self.batch_size is None: normfactors = self.param_viewer.get(pars) results_normfactor = tensorlib.einsum( - 'msab,m->msab', self.normfactor_mask, normfactors + "msab,m->msab", self.normfactor_mask, normfactors ) else: normfactors = self.param_viewer.get(pars) results_normfactor = tensorlib.einsum( - 'msab,ma->msab', self.normfactor_mask, normfactors + "msab,ma->msab", self.normfactor_mask, normfactors ) results_normfactor = tensorlib.where( diff --git a/src/pyhf/modifiers/normsys.py b/src/pyhf/modifiers/normsys.py index 8360b498e9..d1d0717c78 100644 --- a/src/pyhf/modifiers/normsys.py +++ b/src/pyhf/modifiers/normsys.py @@ -8,13 +8,13 @@ def required_parset(sample_data, modifier_data): return { - 'paramset_type': 'constrained_by_normal', - 'n_parameters': 1, - 'is_scalar': True, - 'inits': (0.0,), - 'bounds': ((-5.0, 5.0),), - 'fixed': False, - 'auxdata': (0.0,), + "paramset_type": "constrained_by_normal", + "n_parameters": 1, + "is_scalar": True, + "inits": (0.0,), + "bounds": ((-5.0, 5.0),), + "fixed": False, + "auxdata": (0.0,), } @@ -30,34 +30,34 @@ def __init__(self, config): def collect(self, thismod, nom): maskval = True if thismod else False - lo_factor = thismod['data']['lo'] if thismod else 1.0 - hi_factor = thismod['data']['hi'] if thismod else 1.0 + lo_factor = thismod["data"]["lo"] if thismod else 1.0 + hi_factor = thismod["data"]["hi"] if thismod else 1.0 nom_data = [1.0] * len(nom) lo = [lo_factor] * len(nom) # broadcasting hi = [hi_factor] * len(nom) mask = [maskval] * len(nom) - return {'lo': lo, 'hi': hi, 'mask': mask, 'nom_data': nom_data} + return {"lo": lo, "hi": hi, "mask": mask, "nom_data": nom_data} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'hi': [], 'lo': [], 'nom_data': [], 'mask': []} + "data", {"hi": [], "lo": [], "nom_data": [], "mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['nom_data'] += moddata['nom_data'] - self.builder_data[key][sample]['data']['lo'] += moddata['lo'] - self.builder_data[key][sample]['data']['hi'] += moddata['hi'] - self.builder_data[key][sample]['data']['mask'] += moddata['mask'] + self.builder_data[key][sample]["data"]["nom_data"] += moddata["nom_data"] + self.builder_data[key][sample]["data"]["lo"] += moddata["lo"] + self.builder_data[key][sample]["data"]["hi"] += moddata["hi"] + self.builder_data[key][sample]["data"]["mask"] += moddata["mask"] if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -65,16 +65,16 @@ def finalize(self): class normsys_combined: - name = 'normsys' - op_code = 'multiplication' + name = "normsys" + op_code = "multiplication" def __init__( - self, modifiers, pdfconfig, builder_data, interpcode='code1', batch_size=None + self, modifiers, pdfconfig, builder_data, interpcode="code1", batch_size=None ): self.interpcode = interpcode - assert self.interpcode in ['code1', 'code4'] + assert self.interpcode in ["code1", 
"code4"] - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] normsys_mods = [m for m, _ in modifiers] self.batch_size = batch_size @@ -88,16 +88,16 @@ def __init__( self._normsys_histoset = [ [ [ - builder_data[m][s]['data']['lo'], - builder_data[m][s]['data']['nom_data'], - builder_data[m][s]['data']['hi'], + builder_data[m][s]["data"]["lo"], + builder_data[m][s]["data"]["nom_data"], + builder_data[m][s]["data"]["hi"], ] for s in pdfconfig.samples ] for m in keys ] self._normsys_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] @@ -107,7 +107,7 @@ def __init__( ) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index e4c91c968a..43a62c8647 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -10,12 +10,12 @@ def required_parset(sample_data, modifier_data): return { - 'paramset_type': 'unconstrained', - 'n_parameters': len(sample_data), - 'is_scalar': False, - 'inits': (1.0,) * len(sample_data), - 'bounds': ((0.0, 10.0),) * len(sample_data), - 'fixed': False, + "paramset_type": "unconstrained", + "n_parameters": len(sample_data), + "is_scalar": False, + "inits": (1.0,) * len(sample_data), + "bounds": ((0.0, 10.0),) * len(sample_data), + "fixed": False, } @@ -32,23 +32,23 @@ def __init__(self, config): def collect(self, thismod, nom): maskval = True if thismod else False mask = [maskval] * len(nom) - return {'mask': mask} + return {"mask": mask} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'mask': []} + "data", {"mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['mask'] += moddata['mask'] + self.builder_data[key][sample]["data"]["mask"] += moddata["mask"] if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -56,8 +56,8 @@ def finalize(self): class shapefactor_combined: - name = 'shapefactor' - op_code = 'multiplication' + name = "shapefactor" + op_code = "multiplication" def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): """ @@ -130,7 +130,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): default_backend = pyhf.default_backend self.batch_size = batch_size - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] shapefactor_mods = [m for m, _ in modifiers] parfield_shape = (self.batch_size or 1, pdfconfig.npars) @@ -139,7 +139,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): ) self._shapefactor_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] @@ -180,7 +180,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): ) self._precompute() - 
events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): if not self.param_viewer.index_selection: @@ -190,7 +190,7 @@ def _precompute(self): tensorlib.astensor(self._shapefactor_mask, dtype="bool"), (1, 1, self.batch_size or 1, 1), ) - self.access_field = tensorlib.astensor(self._access_field, dtype='int') + self.access_field = tensorlib.astensor(self._access_field, dtype="int") self.shapefactor_default = tensorlib.ones( tensorlib.shape(self.shapefactor_mask) @@ -212,7 +212,7 @@ def apply(self, pars): flat_pars = tensorlib.reshape(pars, (-1,)) shapefactors = tensorlib.gather(flat_pars, self.access_field) results_shapefactor = tensorlib.einsum( - 'mab,s->msab', shapefactors, self.sample_ones + "mab,s->msab", shapefactors, self.sample_ones ) results_shapefactor = tensorlib.where( self.shapefactor_mask, results_shapefactor, self.shapefactor_default diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index c18ac90149..9a3b97853d 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -45,28 +45,28 @@ def __init__(self, config): self.required_parsets = {} def collect(self, thismod, nom): - uncrt = thismod['data'] if thismod else [0.0] * len(nom) + uncrt = thismod["data"] if thismod else [0.0] * len(nom) mask = [True] * len(nom) if thismod else [False] * len(nom) - return {'mask': mask, 'nom_data': nom, 'uncrt': uncrt} + return {"mask": mask, "nom_data": nom, "uncrt": uncrt} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'uncrt': [], 'nom_data': [], 'mask': []} + "data", {"uncrt": [], "nom_data": [], "mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['mask'].append(moddata['mask']) - self.builder_data[key][sample]['data']['uncrt'].append(moddata['uncrt']) - self.builder_data[key][sample]['data']['nom_data'].append(moddata['nom_data']) + self.builder_data[key][sample]["data"]["mask"].append(moddata["mask"]) + self.builder_data[key][sample]["data"]["uncrt"].append(moddata["uncrt"]) + self.builder_data[key][sample]["data"]["nom_data"].append(moddata["nom_data"]) if thismod: self.required_parsets.setdefault( - thismod['name'], - [required_parset(defined_samp['data'], thismod['data'])], + thismod["name"], + [required_parset(defined_samp["data"], thismod["data"])], ) def finalize(self): @@ -97,14 +97,14 @@ def finalize(self): class shapesys_combined: - name = 'shapesys' - op_code = 'multiplication' + name = "shapesys" + op_code = "multiplication" def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): default_backend = pyhf.default_backend self.batch_size = batch_size - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] self._shapesys_mods = [m for m, _ in modifiers] parfield_shape = (self.batch_size or 1, pdfconfig.npars) @@ -113,16 +113,16 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): ) self._shapesys_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] self.__shapesys_info = default_backend.astensor( [ [ [ - builder_data[m][s]['data']['mask'], - builder_data[m][s]['data']['nom_data'], - 
builder_data[m][s]['data']['uncrt'], + builder_data[m][s]["data"]["mask"], + builder_data[m][s]["data"]["nom_data"], + builder_data[m][s]["data"]["uncrt"], ] for s in pdfconfig.samples ] @@ -143,7 +143,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): self._reindex_access_field(pdfconfig) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _reindex_access_field(self, pdfconfig): default_backend = pyhf.default_backend @@ -177,7 +177,7 @@ def _precompute(self): self.shapesys_mask = tensorlib.tile( self.shapesys_mask, (1, 1, self.batch_size or 1, 1) ) - self.access_field = tensorlib.astensor(self._access_field, dtype='int') + self.access_field = tensorlib.astensor(self._access_field, dtype="int") self.sample_ones = tensorlib.ones(tensorlib.shape(self.shapesys_mask)[1]) self.shapesys_default = tensorlib.ones(tensorlib.shape(self.shapesys_mask)) @@ -196,7 +196,7 @@ def apply(self, pars): flat_pars = tensorlib.reshape(pars, (-1,)) shapefactors = tensorlib.gather(flat_pars, self.access_field) results_shapesys = tensorlib.einsum( - 'mab,s->msab', shapefactors, self.sample_ones + "mab,s->msab", shapefactors, self.sample_ones ) results_shapesys = tensorlib.where( diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index bb4e9f3f3e..b558cc58dd 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -12,14 +12,14 @@ def required_parset(sigmas, fixed: list[bool]): n_parameters = len(sigmas) return { - 'paramset_type': 'constrained_by_normal', - 'n_parameters': n_parameters, - 'is_scalar': False, - 'inits': (1.0,) * n_parameters, - 'bounds': ((1e-10, 10.0),) * n_parameters, - 'fixed': tuple(fixed), - 'auxdata': (1.0,) * n_parameters, - 'sigmas': tuple(sigmas), + "paramset_type": "constrained_by_normal", + "n_parameters": n_parameters, + "is_scalar": False, + "inits": (1.0,) * n_parameters, + "bounds": ((1e-10, 10.0),) * n_parameters, + "fixed": tuple(fixed), + "auxdata": (1.0,) * n_parameters, + "sigmas": tuple(sigmas), } @@ -34,23 +34,23 @@ def __init__(self, config): self.required_parsets = {} def collect(self, thismod, nom): - uncrt = thismod['data'] if thismod else [0.0] * len(nom) + uncrt = thismod["data"] if thismod else [0.0] * len(nom) mask = [True if thismod else False] * len(nom) - return {'mask': mask, 'nom_data': nom, 'uncrt': uncrt} + return {"mask": mask, "nom_data": nom, "uncrt": uncrt} def append(self, key, channel, sample, thismod, defined_samp): self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault( - 'data', {'uncrt': [], 'nom_data': [], 'mask': []} + "data", {"uncrt": [], "nom_data": [], "mask": []} ) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) moddata = self.collect(thismod, nom) - self.builder_data[key][sample]['data']['mask'].append(moddata['mask']) - self.builder_data[key][sample]['data']['uncrt'].append(moddata['uncrt']) - self.builder_data[key][sample]['data']['nom_data'].append(moddata['nom_data']) + self.builder_data[key][sample]["data"]["mask"].append(moddata["mask"]) + self.builder_data[key][sample]["data"]["uncrt"].append(moddata["uncrt"]) + self.builder_data[key][sample]["data"]["nom_data"].append(moddata["nom_data"]) def finalize(self): default_backend = pyhf.default_backend @@ -78,13 +78,13 @@ def finalize(self): ) for modname in self.builder_data: - parname = modname.split('/')[1] + parname = 
modname.split("/")[1] nomsall = default_backend.sum( [ - modifier_data['data']['nom_data'] + modifier_data["data"]["nom_data"] for modifier_data in self.builder_data[modname].values() - if default_backend.astensor(modifier_data['data']['mask']).any() + if default_backend.astensor(modifier_data["data"]["mask"]).any() ], axis=0, ) @@ -92,12 +92,12 @@ def finalize(self): [ [ ( - (modifier_data['data']['uncrt'][binnr] / nomsall[binnr]) + (modifier_data["data"]["uncrt"][binnr] / nomsall[binnr]) ** 2 if nomsall[binnr] > 0 else 0.0 ) - for binnr in range(len(modifier_data['data']['nom_data'])) + for binnr in range(len(modifier_data["data"]["nom_data"])) ] for modifier_data in self.builder_data[modname].values() ], @@ -112,7 +112,7 @@ def finalize(self): masks = {} for modifier_data in self.builder_data[modname].values(): mask_this_sample = default_backend.astensor( - modifier_data['data']['mask'], dtype='bool' + modifier_data["data"]["mask"], dtype="bool" ) if mask_this_sample.any(): if modname not in masks: @@ -134,14 +134,14 @@ def finalize(self): class staterror_combined: - name = 'staterror' - op_code = 'multiplication' + name = "staterror" + op_code = "multiplication" def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): default_backend = pyhf.default_backend self.batch_size = batch_size - keys = [f'{mtype}/{m}' for m, mtype in modifiers] + keys = [f"{mtype}/{m}" for m, mtype in modifiers] self._staterr_mods = [m for m, _ in modifiers] parfield_shape = (self.batch_size or 1, pdfconfig.npars) @@ -150,7 +150,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): ) self._staterror_mask = [ - [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] + [[builder_data[m][s]["data"]["mask"]] for s in pdfconfig.samples] for m in keys ] global_concatenated_bin_indices = [ @@ -165,7 +165,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): self._reindex_access_field(pdfconfig) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _reindex_access_field(self, pdfconfig): default_backend = pyhf.default_backend @@ -198,7 +198,7 @@ def _precompute(self): self.staterror_mask = tensorlib.tile( self.staterror_mask, (1, 1, self.batch_size or 1, 1) ) - self.access_field = tensorlib.astensor(self._access_field, dtype='int') + self.access_field = tensorlib.astensor(self._access_field, dtype="int") self.sample_ones = tensorlib.ones(tensorlib.shape(self.staterror_mask)[1]) self.staterror_default = tensorlib.ones(tensorlib.shape(self.staterror_mask)) @@ -212,7 +212,7 @@ def apply(self, pars): else: flat_pars = tensorlib.reshape(pars, (-1,)) statfactors = tensorlib.gather(flat_pars, self.access_field) - results_staterr = tensorlib.einsum('mab,s->msab', statfactors, self.sample_ones) + results_staterr = tensorlib.einsum("mab,s->msab", statfactors, self.sample_ones) results_staterr = tensorlib.where( self.staterror_mask, results_staterr, self.staterror_default ) diff --git a/src/pyhf/optimize/__init__.py b/src/pyhf/optimize/__init__.py index dace3e026e..c055fac8ed 100644 --- a/src/pyhf/optimize/__init__.py +++ b/src/pyhf/optimize/__init__.py @@ -5,7 +5,7 @@ class _OptimizerRetriever: def __getattr__(self, name): - if name == 'scipy_optimizer': + if name == "scipy_optimizer": from pyhf.optimize.opt_scipy import scipy_optimizer assert scipy_optimizer @@ -15,7 +15,7 @@ def __getattr__(self, name): # for autocomplete and dir() calls self.scipy_optimizer = 
scipy_optimizer return scipy_optimizer - elif name == 'minuit_optimizer': + elif name == "minuit_optimizer": try: from pyhf.optimize.opt_minuit import minuit_optimizer @@ -31,9 +31,9 @@ def __getattr__(self, name): "There was a problem importing Minuit. The minuit optimizer cannot be used.", e, ) - elif name == '__wrapped__': # doctest + elif name == "__wrapped__": # doctest pass OptimizerRetriever = _OptimizerRetriever() -__all__ = ['OptimizerRetriever'] +__all__ = ["OptimizerRetriever"] diff --git a/src/pyhf/optimize/common.py b/src/pyhf/optimize/common.py index 24483fe82f..c7870f5f5c 100644 --- a/src/pyhf/optimize/common.py +++ b/src/pyhf/optimize/common.py @@ -24,7 +24,7 @@ def _make_stitch_pars(tv=None, fixed_values=None): def stitch_pars(pars, stitch_with=fixed_values): tb, _ = get_backend() - return tv.stitch([tb.astensor(stitch_with, dtype='float'), pars]) + return tv.stitch([tb.astensor(stitch_with, dtype="float"), pars]) return stitch_pars @@ -37,16 +37,16 @@ def _get_tensor_shim(): that tensorlib is imported correctly. """ tensorlib, _ = get_backend() - if tensorlib.name == 'numpy': + if tensorlib.name == "numpy": from pyhf.optimize.opt_numpy import wrap_objective as numpy_shim return numpy_shim - if tensorlib.name == 'jax': + if tensorlib.name == "jax": from pyhf.optimize.opt_jax import wrap_objective as jax_shim return jax_shim - raise ValueError(f'No optimizer shim for {tensorlib.name}.') + raise ValueError(f"No optimizer shim for {tensorlib.name}.") def shim( @@ -107,7 +107,7 @@ def shim( if do_stitch: all_init = tensorlib.astensor(init_pars) variable_init = tensorlib.tolist( - tensorlib.gather(all_init, tensorlib.astensor(variable_idx, dtype='int')) + tensorlib.gather(all_init, tensorlib.astensor(variable_idx, dtype="int")) ) variable_bounds = [par_bounds[i] for i in variable_idx] # stitched out the fixed values, so we don't pass any to the underlying minimizer @@ -130,10 +130,10 @@ def shim( stitch_pars, do_grad=do_grad, jit_pieces={ - 'fixed_idx': fixed_idx, - 'variable_idx': variable_idx, - 'fixed_values': fixed_values, - 'do_stitch': do_stitch, + "fixed_idx": fixed_idx, + "variable_idx": variable_idx, + "fixed_values": fixed_values, + "do_stitch": do_stitch, }, ) diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py index 19e150f069..5f2cfa681a 100644 --- a/src/pyhf/optimize/mixins.py +++ b/src/pyhf/optimize/mixins.py @@ -14,7 +14,7 @@ class OptimizerMixin: """Mixin Class to build optimizers.""" - __slots__ = ['maxiter', 'verbose'] + __slots__ = ["maxiter", "verbose"] def __init__(self, **kwargs): """ @@ -24,8 +24,8 @@ def __init__(self, **kwargs): maxiter (:obj:`int`): maximum number of iterations. Default is 100000. verbose (:obj:`int`): verbose output level during minimization. Default is off (0). 
""" - self.maxiter = kwargs.pop('maxiter', 100000) - self.verbose = kwargs.pop('verbose', 0) + self.maxiter = kwargs.pop("maxiter", 100000) + self.verbose = kwargs.pop("verbose", 0) if kwargs: raise exceptions.Unsupported( @@ -80,7 +80,7 @@ def _internal_postprocess(self, fitresult, stitch_pars, return_uncertainties=Fal fitted_pars = stitch_pars(tensorlib.astensor(fitresult.x)) # check if uncertainties were provided (and stitch just in case) - uncertainties = getattr(fitresult, 'unc', None) + uncertainties = getattr(fitresult, "unc", None) if uncertainties is not None: # extract number of fixed parameters num_fixed_pars = len(fitted_pars) - len(fitresult.x) @@ -99,7 +99,7 @@ def _internal_postprocess(self, fitresult, stitch_pars, return_uncertainties=Fal if return_uncertainties: fitted_pars = tensorlib.stack([fitted_pars, uncertainties], axis=1) - correlations = getattr(fitresult, 'corr', None) + correlations = getattr(fitresult, "corr", None) if correlations is not None: _zeros = tensorlib.zeros(num_fixed_pars) # possibly a more elegant way to do this diff --git a/src/pyhf/optimize/opt_jax.py b/src/pyhf/optimize/opt_jax.py index 119e6982ba..011067e88f 100644 --- a/src/pyhf/optimize/opt_jax.py +++ b/src/pyhf/optimize/opt_jax.py @@ -13,13 +13,13 @@ def _final_objective( pars, data, fixed_values, fixed_idx, variable_idx, do_stitch, objective, pdf ): - log.debug('jitting function') + log.debug("jitting function") tensorlib, _ = get_backend() pars = tensorlib.astensor(pars) if do_stitch: tv = _TensorViewer([fixed_idx, variable_idx]) constrained_pars = tv.stitch( - [tensorlib.astensor(fixed_values, dtype='float'), pars] + [tensorlib.astensor(fixed_values, dtype="float"), pars] ) else: constrained_pars = pars @@ -56,10 +56,10 @@ def func(pars): return _jitted_objective_and_grad( pars, data, - jit_pieces['fixed_values'], - tuple(jit_pieces['fixed_idx']), - tuple(jit_pieces['variable_idx']), - jit_pieces['do_stitch'], + jit_pieces["fixed_values"], + tuple(jit_pieces["fixed_idx"]), + tuple(jit_pieces["variable_idx"]), + jit_pieces["do_stitch"], objective, pdf, ) @@ -71,10 +71,10 @@ def func(pars): return _jitted_objective( pars, data, - jit_pieces['fixed_values'], - tuple(jit_pieces['fixed_idx']), - tuple(jit_pieces['variable_idx']), - jit_pieces['do_stitch'], + jit_pieces["fixed_values"], + tuple(jit_pieces["fixed_idx"]), + tuple(jit_pieces["variable_idx"]), + jit_pieces["do_stitch"], objective, pdf, ) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index 8ba7d094c5..20282dd847 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -12,7 +12,7 @@ class minuit_optimizer(OptimizerMixin): Optimizer that minimizes via :meth:`iminuit.Minuit.migrad`. """ - __slots__ = ['errordef', 'name', 'steps', 'strategy', 'tolerance'] + __slots__ = ["errordef", "name", "steps", "strategy", "tolerance"] def __init__(self, *args, **kwargs): """ @@ -38,11 +38,11 @@ def __init__(self, *args, **kwargs): See specific optimizer for detailed meaning. Default is ``0.1``. 
""" - self.name = 'minuit' - self.errordef = kwargs.pop('errordef', 1) - self.steps = kwargs.pop('steps', 1000) - self.strategy = kwargs.pop('strategy', None) - self.tolerance = kwargs.pop('tolerance', 0.1) + self.name = "minuit" + self.errordef = kwargs.pop("errordef", 1) + self.steps = kwargs.pop("steps", 1000) + self.strategy = kwargs.pop("strategy", None) + self.tolerance = kwargs.pop("tolerance", 0.1) super().__init__(*args, **kwargs) def _get_minimizer( @@ -103,7 +103,7 @@ def _minimize( Returns: fitresult (scipy.optimize.OptimizeResult): the fit result """ - maxiter = options.pop('maxiter', self.maxiter) + maxiter = options.pop("maxiter", self.maxiter) # do_grad value results in iminuit.Minuit.strategy of either: # 0: Fast. Does not check a user-provided gradient. # 1: Default. Checks user-provided gradient against numerical gradient. @@ -112,7 +112,7 @@ def _minimize( # passing strategy=None as options kwarg if strategy is None: strategy = 0 if do_grad else 1 - tolerance = options.pop('tolerance', self.tolerance) + tolerance = options.pop("tolerance", self.tolerance) if options: raise exceptions.Unsupported( f"Unsupported options were passed in: {list(options)}." diff --git a/src/pyhf/optimize/opt_scipy.py b/src/pyhf/optimize/opt_scipy.py index 6b8de59cc8..b0b2a2d0ba 100644 --- a/src/pyhf/optimize/opt_scipy.py +++ b/src/pyhf/optimize/opt_scipy.py @@ -11,7 +11,7 @@ class scipy_optimizer(OptimizerMixin): Optimizer that uses :func:`scipy.optimize.minimize`. """ - __slots__ = ['name', 'solver_options', 'tolerance'] + __slots__ = ["name", "solver_options", "tolerance"] def __init__(self, *args, **kwargs): """ @@ -27,9 +27,9 @@ def __init__(self, *args, **kwargs): :func:`scipy.optimize.show_options` for additional options of optimization solvers. """ - self.name = 'scipy' - self.tolerance = kwargs.pop('tolerance', None) - self.solver_options = kwargs.pop('solver_options', {}) + self.name = "scipy" + self.tolerance = kwargs.pop("tolerance", None) + self.solver_options = kwargs.pop("solver_options", {}) super().__init__(*args, **kwargs) def _get_minimizer( @@ -71,11 +71,11 @@ def _minimize( Returns: fitresult (scipy.optimize.OptimizeResult): the fit result """ - maxiter = options.pop('maxiter', self.maxiter) - verbose = options.pop('verbose', self.verbose) - method = options.pop('method', 'SLSQP') - tolerance = options.pop('tolerance', self.tolerance) - solver_options = options.pop('solver_options', self.solver_options) + maxiter = options.pop("maxiter", self.maxiter) + verbose = options.pop("verbose", self.verbose) + method = options.pop("method", "SLSQP") + tolerance = options.pop("tolerance", self.tolerance) + solver_options = options.pop("solver_options", self.solver_options) if options: raise exceptions.Unsupported( f"Unsupported options were passed in: {list(options)}." 
@@ -85,7 +85,7 @@ def _minimize( indices = [i for i, _ in fixed_vals] values = [v for _, v in fixed_vals] if fixed_vals: - constraints = [{'type': 'eq', 'fun': lambda v: v[indices] - values}] + constraints = [{"type": "eq", "fun": lambda v: v[indices] - values}] # update the initial values to the fixed value for any fixed parameter for idx, fixed_val in fixed_vals: x0[idx] = fixed_val diff --git a/src/pyhf/parameters/__init__.py b/src/pyhf/parameters/__init__.py index 1dc89f22b7..9b7d5affa6 100644 --- a/src/pyhf/parameters/__init__.py +++ b/src/pyhf/parameters/__init__.py @@ -8,12 +8,12 @@ from pyhf.parameters.utils import reduce_paramsets_requirements __all__ = [ - 'ParamViewer', - 'constrained_by_normal', - 'constrained_by_poisson', - 'paramset', - 'reduce_paramsets_requirements', - 'unconstrained', + "ParamViewer", + "constrained_by_normal", + "constrained_by_poisson", + "paramset", + "reduce_paramsets_requirements", + "unconstrained", ] diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index 3a59d4a1e8..8c1aaa6f06 100644 --- a/src/pyhf/parameters/paramsets.py +++ b/src/pyhf/parameters/paramsets.py @@ -15,15 +15,15 @@ def __dir__(): class paramset: def __init__(self, **kwargs): - self.name = kwargs.pop('name') - self.n_parameters = kwargs.pop('n_parameters') - self.suggested_init = kwargs.pop('inits') - self.suggested_bounds = kwargs.pop('bounds') - self._suggested_fixed = kwargs.pop('fixed') - self.is_scalar = kwargs.pop('is_scalar') + self.name = kwargs.pop("name") + self.n_parameters = kwargs.pop("n_parameters") + self.suggested_init = kwargs.pop("inits") + self.suggested_bounds = kwargs.pop("bounds") + self._suggested_fixed = kwargs.pop("fixed") + self.is_scalar = kwargs.pop("is_scalar") if self.is_scalar and not (self.n_parameters == 1): raise ValueError( - f'misconfigured parameter set {self.name}. Scalar but N>1 parameters.' + f"misconfigured parameter set {self.name}. Scalar but N>1 parameters." 
) @property @@ -34,13 +34,13 @@ def suggested_fixed(self) -> list[bool]: @property def suggested_fixed_as_bool(self) -> bool: - '''compresses list of same-value bools into single bool''' + """compresses list of same-value bools into single bool""" suggested_fixed = self.suggested_fixed first = suggested_fixed[0] if all([x == first for x in suggested_fixed]): return first raise RuntimeError( - f'{suggested_fixed} is neither all-True nor all-False, so not compressible' + f"{suggested_fixed} is neither all-True nor all-False, so not compressible" ) @suggested_fixed.setter @@ -65,9 +65,9 @@ def __init__(self, **kwargs): class constrained_by_normal(constrained_paramset): def __init__(self, **kwargs): super().__init__(**kwargs) - self.pdf_type = 'normal' - self.auxdata = kwargs.pop('auxdata') - sigmas = kwargs.pop('sigmas', None) + self.pdf_type = "normal" + self.auxdata = kwargs.pop("auxdata") + sigmas = kwargs.pop("sigmas", None) if sigmas: self.sigmas = sigmas @@ -81,9 +81,9 @@ def width(self): class constrained_by_poisson(constrained_paramset): def __init__(self, **kwargs): super().__init__(**kwargs) - self.pdf_type = 'poisson' - self.auxdata = kwargs.pop('auxdata') - factors = kwargs.pop('factors') + self.pdf_type = "poisson" + self.auxdata = kwargs.pop("auxdata") + factors = kwargs.pop("factors") self.factors = factors def width(self): diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py index 170c2ef9ab..eb345e61d0 100644 --- a/src/pyhf/parameters/paramview.py +++ b/src/pyhf/parameters/paramview.py @@ -20,8 +20,8 @@ def _tensorviewer_from_parmap(par_map, batch_size): ( ( paramset_name, - paramset_spec['slice'], - paramset_spec['slice'].start, + paramset_spec["slice"], + paramset_spec["slice"].start, ) for paramset_name, paramset_spec in par_map.items() ), @@ -45,11 +45,11 @@ def extract_index_access(baseviewer, subviewer, indices): # the transpose is here so that modifier code doesn't have to do it indices_concatenated = tensorlib.astensor( ( - tensorlib.einsum('ij->ji', stitched) + tensorlib.einsum("ij->ji", stitched) if len(tensorlib.shape(stitched)) > 1 else stitched ), - dtype='int', + dtype="int", ) return index_selection, stitched, indices_concatenated @@ -65,7 +65,7 @@ def __init__(self, shape, par_map, par_selection): batch_size = shape[0] if len(shape) > 1 else None fullsize = default_backend.product(default_backend.astensor(shape)) - flat_indices = default_backend.astensor(range(int(fullsize)), dtype='int') + flat_indices = default_backend.astensor(range(int(fullsize)), dtype="int") self._all_indices = default_backend.reshape(flat_indices, shape) # a tensor viewer that can split and stitch parameters @@ -74,7 +74,7 @@ def __init__(self, shape, par_map, par_selection): # a tensor viewer that can split and stitch the selected parameters self.selected_viewer = _tensorviewer_from_sizes( [ - par_map[s]['slice'].stop - par_map[s]['slice'].start + par_map[s]["slice"].stop - par_map[s]["slice"].start for s in par_selection ], par_selection, @@ -82,7 +82,7 @@ def __init__(self, shape, par_map, par_selection): ) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() diff --git a/src/pyhf/parameters/utils.py b/src/pyhf/parameters/utils.py index d9f1373c4f..c090376e39 100644 --- a/src/pyhf/parameters/utils.py +++ b/src/pyhf/parameters/utils.py @@ -11,15 +11,15 @@ def reduce_paramsets_requirements(paramsets_requirements, 
paramsets_user_configs reduced_paramsets_requirements = {} paramset_keys = [ - 'paramset_type', - 'n_parameters', - 'is_scalar', - 'inits', - 'bounds', - 'auxdata', - 'factors', - 'sigmas', - 'fixed', + "paramset_type", + "n_parameters", + "is_scalar", + "inits", + "bounds", + "auxdata", + "factors", + "sigmas", + "fixed", ] # - process all defined paramsets @@ -36,7 +36,7 @@ def reduce_paramsets_requirements(paramsets_requirements, paramsets_user_configs for k in paramset_keys: for paramset_requirement in paramset_requirements: # undefined: the modifier does not support configuring that property - v = paramset_requirement.get(k, 'undefined') + v = paramset_requirement.get(k, "undefined") combined_paramset.setdefault(k, set()).add(v) if len(combined_paramset[k]) != 1: @@ -48,23 +48,23 @@ def reduce_paramsets_requirements(paramsets_requirements, paramsets_user_configs # get user-defined-config if it exists or set to default config v = paramset_user_configs.get(k, default_v) # if v is a tuple, it's not user-configured, so convert to list - if v == 'undefined': + if v == "undefined": continue if isinstance(v, tuple): v = list(v) # this implies user-configured, so check that it has the right number of elements elif isinstance(v, list) and default_v and len(v) != len(default_v): raise exceptions.InvalidModel( - f'Incorrect number of values ({len(v)}) for {k} were configured by you, expected {len(default_v)}.' + f"Incorrect number of values ({len(v)}) for {k} were configured by you, expected {len(default_v)}." ) - elif v and default_v == 'undefined': + elif v and default_v == "undefined": raise exceptions.InvalidModel( - f'{paramset_name} does not use the {k} attribute.' + f"{paramset_name} does not use the {k} attribute." ) combined_paramset[k] = v - combined_paramset['name'] = paramset_name + combined_paramset["name"] = paramset_name reduced_paramsets_requirements[paramset_name] = combined_paramset return reduced_paramsets_requirements diff --git a/src/pyhf/patchset.py b/src/pyhf/patchset.py index be0a92d673..c53e80436c 100644 --- a/src/pyhf/patchset.py +++ b/src/pyhf/patchset.py @@ -41,8 +41,8 @@ def __init__(self, spec): patch (:class:`~pyhf.patchset.Patch`): The Patch instance. """ - super().__init__(spec['patch']) - self._metadata = spec['metadata'] + super().__init__(spec["patch"]) + self._metadata = spec["metadata"] @property def metadata(self): @@ -52,12 +52,12 @@ def metadata(self): @property def name(self): """The name of the patch""" - return self.metadata['name'] + return self.metadata["name"] @property def values(self): """The values of the associated labels for the patch""" - return tuple(self.metadata['values']) + return tuple(self.metadata["values"]) def __repr__(self): """Representation of the object""" @@ -161,38 +161,38 @@ def __init__(self, spec, **config_kwargs): patchset (:class:`~pyhf.patchset.PatchSet`): The PatchSet instance. 
""" - self.schema = config_kwargs.pop('schema', 'patchset.json') - self._version = config_kwargs.pop('version', spec.get('version', None)) + self.schema = config_kwargs.pop("schema", "patchset.json") + self._version = config_kwargs.pop("version", spec.get("version", None)) # run jsonschema validation of input specification against the (provided) schema log.info(f"Validating spec against schema: {self.schema}") schema.validate(spec, self.schema, version=self._version) # set properties based on metadata - self._metadata = spec['metadata'] + self._metadata = spec["metadata"] # list of all patch objects self._patches = [] # look-up table for retrieving patch by name or values - self._patches_by_key = {'name': {}, 'values': {}} + self._patches_by_key = {"name": {}, "values": {}} # inflate all patches - for patchspec in spec['patches']: + for patchspec in spec["patches"]: patch = Patch(patchspec) if patch.name in self._patches_by_key: raise exceptions.InvalidPatchSet( - f'Multiple patches were defined by name for {patch}.' + f"Multiple patches were defined by name for {patch}." ) if patch.values in self._patches_by_key: raise exceptions.InvalidPatchSet( - f'Multiple patches were defined by values for {patch}.' + f"Multiple patches were defined by values for {patch}." ) if len(patch.values) != len(self.labels): raise exceptions.InvalidPatchSet( - f'Incompatible number of values ({len(patch.values)} for {patch} in patchset. Expected {len(self.labels)}.' + f"Incompatible number of values ({len(patch.values)} for {patch} in patchset. Expected {len(self.labels)}." ) # all good, register patch @@ -214,22 +214,22 @@ def metadata(self): @property def references(self): """The references in the PatchSet metadata""" - return self.metadata['references'] + return self.metadata["references"] @property def description(self): """The description in the PatchSet metadata""" - return self.metadata['description'] + return self.metadata["description"] @property def digests(self): """The digests in the PatchSet metadata""" - return self.metadata['digests'] + return self.metadata["digests"] @property def labels(self): """The labels in the PatchSet metadata""" - return self.metadata['labels'] + return self.metadata["labels"] @property def patches(self): diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index 5cc98ad2c6..58ca18dda6 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -28,11 +28,11 @@ def _finalize_parameters_specs(user_parameters, _paramsets_requirements): # build up a dictionary of the parameter configurations provided by the user _paramsets_user_configs = {} for parameter in user_parameters: - if parameter['name'] in _paramsets_user_configs: + if parameter["name"] in _paramsets_user_configs: raise exceptions.InvalidModel( f"Multiple parameter configurations for {parameter['name']} were found." 
) - _paramsets_user_configs[parameter.get('name')] = parameter + _paramsets_user_configs[parameter.get("name")] = parameter _reqs = reduce_paramsets_requirements( _paramsets_requirements, _paramsets_user_configs ) @@ -44,7 +44,7 @@ def _create_parameters_from_spec(_reqs): auxdata = [] auxdata_order = [] for param_name, paramset_requirements in _reqs.items(): - paramset_type = getattr(pyhf.parameters, paramset_requirements['paramset_type']) + paramset_type = getattr(pyhf.parameters, paramset_requirements["paramset_type"]) paramset = paramset_type(**paramset_requirements) if paramset.constrained: # is constrained auxdata += paramset.auxdata @@ -60,24 +60,24 @@ def __init__(self, config): self.config = config def append(self, channel, sample, defined_samp): - self.mega_samples.setdefault(sample, {'name': f'mega_{sample}', 'nom': []}) + self.mega_samples.setdefault(sample, {"name": f"mega_{sample}", "nom": []}) nom = ( - defined_samp['data'] + defined_samp["data"] if defined_samp else [0.0] * self.config.channel_nbins[channel] ) if not len(nom) == self.config.channel_nbins[channel]: raise exceptions.InvalidModel( - f'expected {self.config.channel_nbins[channel]} size sample data but got {len(nom)}' + f"expected {self.config.channel_nbins[channel]} size sample data but got {len(nom)}" ) - self.mega_samples[sample]['nom'].append(nom) + self.mega_samples[sample]["nom"].append(nom) def finalize(self): default_backend = pyhf.default_backend nominal_rates = default_backend.astensor( [ - default_backend.concatenate(self.mega_samples[sample]['nom']) + default_backend.concatenate(self.mega_samples[sample]["nom"]) for sample in self.config.samples ] ) @@ -115,17 +115,17 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size): # 2. make a helper that maps channel-name/sample-name to pairs of channel-sample structs helper = {} _keys_seen = set() - for c in spec['channels']: - for s in c['samples']: + for c in spec["channels"]: + for s in c["samples"]: moddict = {} - for x in s['modifiers']: - if x['type'] not in modifier_set: + for x in s["modifiers"]: + if x["type"] not in modifier_set: raise exceptions.InvalidModifier( - f'{x["type"]} not among {list(modifier_set)}' + f"{x['type']} not among {list(modifier_set)}" ) key = f"{x['type']}/{x['name']}" # check if the modifier to be built is allowed to be shared - if not modifiers_builders[x['type']].is_shared and ( + if not modifiers_builders[x["type"]].is_shared and ( key in _keys_seen or key in moddict ): raise exceptions.InvalidModel( @@ -133,7 +133,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size): ) moddict[key] = x - helper.setdefault(c['name'], {})[s['name']] = (s, moddict) + helper.setdefault(c["name"], {})[s["name"]] = (s, moddict) # add in all keys seen _keys_seen.update(moddict) @@ -146,7 +146,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size): ) nominal.append(c, s, defined_samp) for m, mtype in config.modifiers: - key = f'{mtype}/{m}' + key = f"{mtype}/{m}" # this is None if modifier doesn't affect channel/sample. 
thismod = defined_mods.get(key) if defined_mods else None modifiers_builders[mtype].append(key, c, s, thismod, defined_samp) @@ -165,7 +165,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size): _required_paramsets.setdefault(pname, []) _required_paramsets[pname] += req_list - user_parameters = spec.get('parameters', []) + user_parameters = spec.get("parameters", []) _required_paramsets = _finalize_parameters_specs( user_parameters, @@ -176,7 +176,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size): ) if not _required_paramsets: - raise exceptions.InvalidModel('No parameters specified for the Model.') + raise exceptions.InvalidModel("No parameters specified for the Model.") config.set_parameters(_prameter_objects) config.set_auxinfo(_auxdata, _auxdata_order) @@ -214,15 +214,15 @@ def __init__(self, spec, **config_kwargs): Args: spec (:obj:`jsonable`): The HistFactory JSON specification. """ - super().__init__(channels=spec['channels']) + super().__init__(channels=spec["channels"]) default_modifier_settings = { - 'normsys': {'interpcode': 'code4'}, - 'histosys': {'interpcode': 'code4p'}, + "normsys": {"interpcode": "code4"}, + "histosys": {"interpcode": "code4p"}, } self.modifier_settings = config_kwargs.pop( - 'modifier_settings', default_modifier_settings + "modifier_settings", default_modifier_settings ) if config_kwargs: @@ -315,7 +315,7 @@ def suggested_init(self): """ init = [] for name in self.par_order: - init = init + self.par_map[name]['paramset'].suggested_init + init = init + self.par_map[name]["paramset"].suggested_init return init def suggested_bounds(self): @@ -335,7 +335,7 @@ def suggested_bounds(self): """ bounds = [] for name in self.par_order: - bounds = bounds + self.par_map[name]['paramset'].suggested_bounds + bounds = bounds + self.par_map[name]["paramset"].suggested_bounds return bounds def par_slice(self, name): @@ -354,7 +354,7 @@ def par_slice(self, name): >>> model.config.par_slice("uncorr_bkguncrt") slice(1, 3, None) """ - return self.par_map[name]['slice'] + return self.par_map[name]["slice"] @property def par_names(self): @@ -382,7 +382,7 @@ def par_names(self): continue _names.extend( - [f'{name}[{index}]' for index in range(param_set.n_parameters)] + [f"{name}[{index}]" for index in range(param_set.n_parameters)] ) return _names @@ -402,7 +402,7 @@ def param_set(self, name): >>> param_set.pdf_type 'poisson' """ - return self.par_map[name]['paramset'] + return self.par_map[name]["paramset"] def suggested_fixed(self) -> list[bool]: """ @@ -434,7 +434,7 @@ def suggested_fixed(self) -> list[bool]: """ fixed = [] for name in self.par_order: - paramset = self.par_map[name]['paramset'] + paramset = self.par_map[name]["paramset"] fixed += paramset.suggested_fixed return fixed @@ -474,7 +474,7 @@ def _create_and_register_paramsets(self, required_paramsets): next_index = 0 for param_name, paramset in required_paramsets.items(): log.info( - 'adding modifier %s (%s new nuisance parameters)', + "adding modifier %s (%s new nuisance parameters)", param_name, paramset.n_parameters, ) @@ -483,7 +483,7 @@ def _create_and_register_paramsets(self, required_paramsets): next_index = next_index + paramset.n_parameters self._par_order.append(param_name) - self.par_map[param_name] = {'slice': sl, 'paramset': paramset} + self.par_map[param_name] = {"slice": sl, "paramset": paramset} class _ConstraintModel: @@ -614,7 +614,7 @@ def __init__( self._factor_mods.append(modifier_applier.name) self._precompute() - 
events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() @@ -725,7 +725,7 @@ def expected_data(self, pars, return_by_sample=False): ) if return_by_sample: - batch_first = tensorlib.einsum('ij...->ji...', newbysample) + batch_first = tensorlib.einsum("ij...->ji...", newbysample) if self.batch_size is None: return batch_first[0] return batch_first @@ -773,8 +773,8 @@ def __init__( self.batch_size = batch_size # deep-copy "spec" as it may be modified by config self.spec = copy.deepcopy(spec) - self.schema = config_kwargs.pop('schema', 'model.json') - self.version = config_kwargs.pop('version', None) + self.schema = config_kwargs.pop("schema", "model.json") + self.version = config_kwargs.pop("version", None) # run jsonschema validation of input specification against the (provided) schema if validate: log.info(f"Validating spec against schema: {self.schema:s}") @@ -813,7 +813,7 @@ def __init__( if self.constraint_model.has_pdf(): sizes.append(self.config.nauxdata) self.fullpdf_tv = _tensorviewer_from_sizes( - sizes, ['main', 'aux'], self.batch_size + sizes, ["main", "aux"], self.batch_size ) @property @@ -950,14 +950,14 @@ def logpdf(self, pars, data): # Verify parameter and data shapes if pars.shape[-1] != self.config.npars: raise exceptions.InvalidPdfParameters( - f'eval failed as pars has len {pars.shape[-1]} but {self.config.npars} was expected' + f"eval failed as pars has len {pars.shape[-1]} but {self.config.npars} was expected" ) if data.shape[-1] != self.nominal_rates.shape[-1] + len( self.config.auxdata ): raise exceptions.InvalidPdfData( - f'eval failed as data has len {data.shape[-1]} but {self.config.nmaindata + self.config.nauxdata} was expected' + f"eval failed as data has len {data.shape[-1]} but {self.config.nmaindata + self.config.nauxdata} was expected" ) result = self.make_pdf(pars).log_prob(data) diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py index 8356b00634..9dd1dfd282 100644 --- a/src/pyhf/readxml.py +++ b/src/pyhf/readxml.py @@ -104,8 +104,8 @@ def import_root_histogram( filecache = filecache or __FILECACHE__ # strip leading slashes as uproot doesn't use "/" for top-level - path = path or '' - path = path.strip('/') + path = path or "" + path = path.strip("/") fullpath = str(resolver(filename)) if fullpath not in filecache: f = uproot.open(fullpath) @@ -122,7 +122,7 @@ def import_root_histogram( hist = f[fullname] else: raise KeyError( - f'Both {name} and {fullname} were tried and not found in {fullpath}' + f"Both {name} and {fullname} were tried and not found in {fullpath}" ) return hist.to_numpy()[0].tolist(), extract_error(hist) @@ -135,21 +135,21 @@ def process_sample( channel_name: str, track_progress: bool = False, ) -> Sample: - inputfile = sample.attrib.get('InputFile', inputfile) - histopath = sample.attrib.get('HistoPath', histopath) - histoname = sample.attrib['HistoName'] + inputfile = sample.attrib.get("InputFile", inputfile) + histopath = sample.attrib.get("HistoPath", histopath) + histoname = sample.attrib["HistoName"] data, err = import_root_histogram(resolver, inputfile, histopath, histoname) parameter_configs: MutableSequence[Parameter] = [] modifiers: MutableSequence[Modifier] = [] # first check if we need to add lumi modifier for this sample - if sample.attrib.get("NormalizeByTheory", "False") == 'True': - modifier_lumi: LumiSys = {'name': 'lumi', 'type': 'lumi', 'data': None} + if sample.attrib.get("NormalizeByTheory", "False") == 
"True": + modifier_lumi: LumiSys = {"name": "lumi", "type": "lumi", "data": None} modifiers.append(modifier_lumi) modtags = tqdm.tqdm( - sample.iter(), unit='modifier', disable=not (track_progress), total=len(sample) + sample.iter(), unit="modifier", disable=not (track_progress), total=len(sample) ) for modtag in modtags: @@ -158,105 +158,105 @@ def process_sample( ) if modtag == sample: continue - if modtag.tag == 'OverallSys': + if modtag.tag == "OverallSys": modifier_normsys: NormSys = { - 'name': modtag.attrib['Name'], - 'type': 'normsys', - 'data': { - 'lo': float(modtag.attrib['Low']), - 'hi': float(modtag.attrib['High']), + "name": modtag.attrib["Name"], + "type": "normsys", + "data": { + "lo": float(modtag.attrib["Low"]), + "hi": float(modtag.attrib["High"]), }, } modifiers.append(modifier_normsys) - elif modtag.tag == 'NormFactor': + elif modtag.tag == "NormFactor": modifier_normfactor: NormFactor = { - 'name': modtag.attrib['Name'], - 'type': 'normfactor', - 'data': None, + "name": modtag.attrib["Name"], + "type": "normfactor", + "data": None, } modifiers.append(modifier_normfactor) parameter_config: Parameter = { - 'name': modtag.attrib['Name'], - 'bounds': [[float(modtag.attrib['Low']), float(modtag.attrib['High'])]], - 'inits': [float(modtag.attrib['Val'])], + "name": modtag.attrib["Name"], + "bounds": [[float(modtag.attrib["Low"]), float(modtag.attrib["High"])]], + "inits": [float(modtag.attrib["Val"])], } - if modtag.attrib.get('Const'): - parameter_config['fixed'] = modtag.attrib['Const'] == 'True' + if modtag.attrib.get("Const"): + parameter_config["fixed"] = modtag.attrib["Const"] == "True" parameter_configs.append(parameter_config) - elif modtag.tag == 'HistoSys': + elif modtag.tag == "HistoSys": lo, _ = import_root_histogram( resolver, - modtag.attrib.get('HistoFileLow', inputfile), - modtag.attrib.get('HistoPathLow', ''), - modtag.attrib['HistoNameLow'], + modtag.attrib.get("HistoFileLow", inputfile), + modtag.attrib.get("HistoPathLow", ""), + modtag.attrib["HistoNameLow"], ) hi, _ = import_root_histogram( resolver, - modtag.attrib.get('HistoFileHigh', inputfile), - modtag.attrib.get('HistoPathHigh', ''), - modtag.attrib['HistoNameHigh'], + modtag.attrib.get("HistoFileHigh", inputfile), + modtag.attrib.get("HistoPathHigh", ""), + modtag.attrib["HistoNameHigh"], ) modifier_histosys: HistoSys = { - 'name': modtag.attrib['Name'], - 'type': 'histosys', - 'data': {'lo_data': lo, 'hi_data': hi}, + "name": modtag.attrib["Name"], + "type": "histosys", + "data": {"lo_data": lo, "hi_data": hi}, } modifiers.append(modifier_histosys) - elif modtag.tag == 'StatError' and modtag.attrib['Activate'] == 'True': - if modtag.attrib.get('HistoName', '') == '': + elif modtag.tag == "StatError" and modtag.attrib["Activate"] == "True": + if modtag.attrib.get("HistoName", "") == "": staterr = err else: extstat, _ = import_root_histogram( resolver, - modtag.attrib.get('HistoFile', inputfile), - modtag.attrib.get('HistoPath', ''), - modtag.attrib['HistoName'], + modtag.attrib.get("HistoFile", inputfile), + modtag.attrib.get("HistoPath", ""), + modtag.attrib["HistoName"], ) staterr = np.multiply(extstat, data).tolist() if not staterr: - raise RuntimeError('cannot determine stat error.') + raise RuntimeError("cannot determine stat error.") modifier_staterror: StatError = { - 'name': f'staterror_{channel_name}', - 'type': 'staterror', - 'data': staterr, + "name": f"staterror_{channel_name}", + "type": "staterror", + "data": staterr, } modifiers.append(modifier_staterror) - elif modtag.tag == 
'ShapeSys': + elif modtag.tag == "ShapeSys": # NB: ConstraintType is ignored - if modtag.attrib.get('ConstraintType', 'Poisson') != 'Poisson': + if modtag.attrib.get("ConstraintType", "Poisson") != "Poisson": log.warning( - 'shapesys modifier %s has a non-poisson constraint', - modtag.attrib['Name'], + "shapesys modifier %s has a non-poisson constraint", + modtag.attrib["Name"], ) shapesys_data, _ = import_root_histogram( resolver, - modtag.attrib.get('InputFile', inputfile), - modtag.attrib.get('HistoPath', ''), - modtag.attrib['HistoName'], + modtag.attrib.get("InputFile", inputfile), + modtag.attrib.get("HistoPath", ""), + modtag.attrib["HistoName"], ) # NB: we convert relative uncertainty to absolute uncertainty modifier_shapesys: ShapeSys = { - 'name': modtag.attrib['Name'], - 'type': 'shapesys', - 'data': [a * b for a, b in zip(data, shapesys_data)], + "name": modtag.attrib["Name"], + "type": "shapesys", + "data": [a * b for a, b in zip(data, shapesys_data)], } modifiers.append(modifier_shapesys) - elif modtag.tag == 'ShapeFactor': + elif modtag.tag == "ShapeFactor": modifier_shapefactor: ShapeFactor = { - 'name': modtag.attrib['Name'], - 'type': 'shapefactor', - 'data': None, + "name": modtag.attrib["Name"], + "type": "shapefactor", + "data": None, } modifiers.append(modifier_shapefactor) else: - log.warning('not considering modifier tag %s', modtag) + log.warning("not considering modifier tag %s", modtag) return { - 'name': sample.attrib['Name'], - 'data': data, - 'modifiers': modifiers, - 'parameter_configs': parameter_configs, + "name": sample.attrib["Name"], + "data": data, + "modifiers": modifiers, + "parameter_configs": parameter_configs, } @@ -266,9 +266,9 @@ def process_data( inputfile: str, histopath: str, ) -> list[float]: - inputfile = sample.attrib.get('InputFile', inputfile) - histopath = sample.attrib.get('HistoPath', histopath) - histoname = sample.attrib['HistoName'] + inputfile = sample.attrib.get("InputFile", inputfile) + histopath = sample.attrib.get("HistoPath", histopath) + histoname = sample.attrib["HistoName"] if inputfile == "" or histoname == "": raise NotImplementedError( @@ -284,16 +284,16 @@ def process_channel( ) -> tuple[str, list[float], list[Sample], list[Parameter]]: channel = channelxml.getroot() - inputfile = channel.attrib.get('InputFile', '') - histopath = channel.attrib.get('HistoPath', '') + inputfile = channel.attrib.get("InputFile", "") + histopath = channel.attrib.get("HistoPath", "") samples = tqdm.tqdm( - channel.findall('Sample'), unit='sample', disable=not (track_progress) + channel.findall("Sample"), unit="sample", disable=not (track_progress) ) - channel_name = channel.attrib['Name'] + channel_name = channel.attrib["Name"] - data = channel.findall('Data') + data = channel.findall("Data") if data: parsed_data = process_data(data[0], resolver, inputfile, histopath) else: @@ -306,7 +306,7 @@ def process_channel( result = process_sample( sample, resolver, inputfile, histopath, channel_name, track_progress ) - channel_parameter_configs.extend(result.pop('parameter_configs')) + channel_parameter_configs.extend(result.pop("parameter_configs")) results.append(result) return channel_name, parsed_data, results, channel_parameter_configs @@ -330,66 +330,66 @@ def process_measurements( results: list[Measurement] = [] other_parameter_configs = other_parameter_configs if other_parameter_configs else [] - for x in toplvl.findall('Measurement'): + for x in toplvl.findall("Measurement"): parameter_configs_map: MutableMapping[str, Parameter] = { - 
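# The zip() above converts ShapeSys relative uncertainties into the absolute
# per-bin uncertainties that pyhf JSON stores; a small worked example:
nominal = [50.0, 52.0]   # sample data
relative = [0.10, 0.05]  # ShapeSys histogram contents
absolute = [a * b for a, b in zip(nominal, relative)]
print(absolute)          # [5.0, 2.6]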
k['name']: dict(**k) for k in other_parameter_configs + k["name"]: dict(**k) for k in other_parameter_configs } - lumi = float(x.attrib['Lumi']) - lumierr = lumi * float(x.attrib['LumiRelErr']) + lumi = float(x.attrib["Lumi"]) + lumierr = lumi * float(x.attrib["LumiRelErr"]) - measurement_name = x.attrib['Name'] + measurement_name = x.attrib["Name"] - poi = x.find('POI') + poi = x.find("POI") if poi is None: raise RuntimeError( f"Measurement {measurement_name} is missing POI specification" ) result: Measurement = { - 'name': measurement_name, - 'config': { - 'poi': poi.text.strip() if poi.text else '', - 'parameters': [ + "name": measurement_name, + "config": { + "poi": poi.text.strip() if poi.text else "", + "parameters": [ { - 'name': 'lumi', - 'auxdata': [lumi], - 'bounds': [[lumi - 5.0 * lumierr, lumi + 5.0 * lumierr]], - 'inits': [lumi], - 'sigmas': [lumierr], + "name": "lumi", + "auxdata": [lumi], + "bounds": [[lumi - 5.0 * lumierr, lumi + 5.0 * lumierr]], + "inits": [lumi], + "sigmas": [lumierr], } ], }, } - for param in x.findall('ParamSetting'): + for param in x.findall("ParamSetting"): # determine what all parameters in the paramsetting have in common overall_param_obj: ParameterBase = {} - if param.attrib.get('Const'): - overall_param_obj['fixed'] = param.attrib['Const'] == 'True' - if param.attrib.get('Val'): - overall_param_obj['inits'] = [float(param.attrib['Val'])] + if param.attrib.get("Const"): + overall_param_obj["fixed"] = param.attrib["Const"] == "True" + if param.attrib.get("Val"): + overall_param_obj["inits"] = [float(param.attrib["Val"])] # might be specifying multiple parameters in the same ParamSetting if param.text: - for param_name in param.text.strip().split(' '): + for param_name in param.text.strip().split(" "): param_interpretation = compat.interpret_rootname(param_name) # type: ignore[no-untyped-call] - if not param_interpretation['is_scalar']: + if not param_interpretation["is_scalar"]: raise ValueError( f'pyhf does not support setting non-scalar parameters ("gammas") constant, such as for {param_name}.' 
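# The lumi parameter config assembled above, in concrete numbers, assuming a
# measurement with Lumi="1.0" and LumiRelErr="0.02":
lumi = 1.0
lumierr = lumi * 0.02
lumi_parameter = {
    "name": "lumi",
    "auxdata": [lumi],
    "bounds": [[lumi - 5.0 * lumierr, lumi + 5.0 * lumierr]],  # [[0.9, 1.1]]
    "inits": [lumi],
    "sigmas": [lumierr],
}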
) - if param_interpretation['name'] == 'lumi': - result['config']['parameters'][0].update(overall_param_obj) # type: ignore[typeddict-item] + if param_interpretation["name"] == "lumi": + result["config"]["parameters"][0].update(overall_param_obj) # type: ignore[typeddict-item] else: # pop from parameter_configs_map because we don't want to duplicate param_obj: Parameter = parameter_configs_map.pop( - param_interpretation['name'], - {'name': param_interpretation['name']}, + param_interpretation["name"], + {"name": param_interpretation["name"]}, ) # ParamSetting will always take precedence param_obj.update(overall_param_obj) # type: ignore[typeddict-item] # add it back in to the parameter_configs_map - parameter_configs_map[param_interpretation['name']] = param_obj - result['config']['parameters'].extend(parameter_configs_map.values()) + parameter_configs_map[param_interpretation["name"]] = param_obj + result["config"]["parameters"].extend(parameter_configs_map.values()) results.append(result) return results @@ -398,7 +398,7 @@ def process_measurements( def dedupe_parameters(parameters: Sequence[Parameter]) -> list[Parameter]: duplicates: MutableMapping[str, MutableSequence[Parameter]] = {} for p in parameters: - duplicates.setdefault(p['name'], []).append(p) + duplicates.setdefault(p["name"], []).append(p) for parname in duplicates: parameter_list = duplicates[parname] if len(parameter_list) == 1: @@ -407,10 +407,10 @@ def dedupe_parameters(parameters: Sequence[Parameter]) -> list[Parameter]: for p in parameter_list: log.warning(p) raise RuntimeError( - f'cannot import workspace due to incompatible parameter configurations for {parname:s}.' + f"cannot import workspace due to incompatible parameter configurations for {parname:s}." ) # no errors raised, de-dupe and return - return list({v['name']: v for v in parameters}.values()) + return list({v["name"]: v for v in parameters}.values()) def parse( @@ -436,8 +436,8 @@ def parse( mounts = mounts or [] toplvl = ET.parse(configfile) inputs = tqdm.tqdm( - [x.text for x in toplvl.findall('Input') if x.text], - unit='channel', + [x.text for x in toplvl.findall("Input") if x.text], + unit="channel", disable=not (track_progress), ) @@ -448,12 +448,12 @@ def parse( observations: MutableSequence[Observation] = [] parameter_configs = [] for inp in inputs: - inputs.set_description(f'Processing {inp}') + inputs.set_description(f"Processing {inp}") channel, data, samples, channel_parameter_configs = process_channel( ET.parse(resolver(inp)), resolver, track_progress ) - channels.append({'name': channel, 'samples': samples}) - observations.append({'name': channel, 'data': data}) + channels.append({"name": channel, "samples": samples}) + observations.append({"name": channel, "data": data}) parameter_configs.extend(channel_parameter_configs) parameter_configs = dedupe_parameters(parameter_configs) @@ -461,13 +461,13 @@ def parse( toplvl, other_parameter_configs=parameter_configs ) result: Workspace = { - 'measurements': measurements, - 'channels': channels, - 'observations': observations, - 'version': schema.version, # type: ignore[typeddict-unknown-key] + "measurements": measurements, + "channels": channels, + "observations": observations, + "version": schema.version, # type: ignore[typeddict-unknown-key] } try: - schema.validate(result, 'workspace.json') + schema.validate(result, "workspace.json") except exceptions.InvalidSpecification as exc: if validation_as_error: raise exc diff --git a/src/pyhf/schema/loader.py b/src/pyhf/schema/loader.py index 
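# dedupe_parameters above tolerates exact duplicates and only raises on
# conflicting configs that share a name; a hedged sketch:
from pyhf.readxml import dedupe_parameters

print(dedupe_parameters([{"name": "mu"}, {"name": "mu"}]))  # [{'name': 'mu'}]
# [{"name": "mu", "inits": [0.0]}, {"name": "mu", "inits": [1.0]}] would
# instead raise the RuntimeError above for incompatible configurations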
fd293b2559..6cb34e6652 100644 --- a/src/pyhf/schema/loader.py +++ b/src/pyhf/schema/loader.py @@ -33,7 +33,7 @@ def load_schema(schema_id: str): """ try: return variables.SCHEMA_CACHE[ - f'{Path(variables.SCHEMA_BASE).joinpath(schema_id)}' + f"{Path(variables.SCHEMA_BASE).joinpath(schema_id)}" ] except KeyError: pass @@ -42,15 +42,15 @@ def load_schema(schema_id: str): with resources.as_file(ref) as path: if not path.exists(): raise pyhf.exceptions.SchemaNotFound( - f'The schema {schema_id} was not found. Do you have the right version or the right path? {path}' + f"The schema {schema_id} was not found. Do you have the right version or the right path? {path}" ) with path.open(encoding="utf-8") as json_schema: schema = json.load(json_schema) - variables.SCHEMA_CACHE[schema['$id']] = schema - return variables.SCHEMA_CACHE[schema['$id']] + variables.SCHEMA_CACHE[schema["$id"]] = schema + return variables.SCHEMA_CACHE[schema["$id"]] # pre-populate the cache to avoid network access # on first validation in standard usage # (not in pyhf.schema.variables to avoid circular imports) -load_schema(f'{variables.SCHEMA_VERSION}/defs.json') +load_schema(f"{variables.SCHEMA_VERSION}/defs.json") diff --git a/src/pyhf/schema/variables.py b/src/pyhf/schema/variables.py index d02cc6b322..f577022c51 100644 --- a/src/pyhf/schema/variables.py +++ b/src/pyhf/schema/variables.py @@ -1,7 +1,7 @@ from importlib import resources -schemas = resources.files('pyhf') / "schemas" +schemas = resources.files("pyhf") / "schemas" SCHEMA_CACHE = {} SCHEMA_BASE = "https://scikit-hep.org/pyhf/schemas/" -SCHEMA_VERSION = '1.0.0' +SCHEMA_VERSION = "1.0.0" diff --git a/src/pyhf/simplemodels.py b/src/pyhf/simplemodels.py index 805bb0ffd9..3dd3025acc 100644 --- a/src/pyhf/simplemodels.py +++ b/src/pyhf/simplemodels.py @@ -127,25 +127,25 @@ def uncorrelated_background( .. versionchanged:: 0.8.0 Added ``poi_name`` argument. 
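# load_schema() above keys its cache on the schema's "$id"; a hedged usage
# sketch (the version path is assumed to match SCHEMA_VERSION):
import pyhf

defs = pyhf.schema.load_schema("1.0.0/defs.json")
print(defs["$id"])  # a second call is served from SCHEMA_CACHE, no file I/O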
""" spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': signal, - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": signal, + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': bkg, - 'modifiers': [ + "name": "background", + "data": bkg, + "modifiers": [ { - 'name': 'uncorr_bkguncrt', - 'type': 'shapesys', - 'data': bkg_uncertainty, + "name": "uncorr_bkguncrt", + "type": "shapesys", + "data": bkg_uncertainty, } ], }, diff --git a/src/pyhf/tensor/__init__.py b/src/pyhf/tensor/__init__.py index 0caa994448..5e5d77d1d4 100644 --- a/src/pyhf/tensor/__init__.py +++ b/src/pyhf/tensor/__init__.py @@ -14,7 +14,7 @@ def __init__(self): self._array_subtypes = set() def __getattr__(self, name): - if name == 'numpy_backend': + if name == "numpy_backend": from pyhf.tensor.numpy_backend import numpy_backend assert numpy_backend @@ -23,7 +23,7 @@ def __getattr__(self, name): self._array_types.add(numpy_backend.array_type) self._array_subtypes.add(numpy_backend.array_subtype) return numpy_backend - elif name == 'jax_backend': + elif name == "jax_backend": try: from pyhf.tensor.jax_backend import jax_backend @@ -49,4 +49,4 @@ def array_subtypes(self): BackendRetriever = _BackendRetriever() -__all__ = ['BackendRetriever'] +__all__ = ["BackendRetriever"] diff --git a/src/pyhf/tensor/common.py b/src/pyhf/tensor/common.py index 5b6623c233..8971911662 100644 --- a/src/pyhf/tensor/common.py +++ b/src/pyhf/tensor/common.py @@ -21,18 +21,18 @@ def __init__(self, indices, batch_size=None, names=None): self.names = names self._partition_indices = indices _concat_indices = default_backend.astensor( - default_backend.concatenate(self._partition_indices), dtype='int' + default_backend.concatenate(self._partition_indices), dtype="int" ) self._sorted_indices = default_backend.tolist(_concat_indices.argsort()) self._precompute() - events.subscribe('tensorlib_changed')(self._precompute) + events.subscribe("tensorlib_changed")(self._precompute) def _precompute(self): tensorlib, _ = get_backend() - self.sorted_indices = tensorlib.astensor(self._sorted_indices, dtype='int') + self.sorted_indices = tensorlib.astensor(self._sorted_indices, dtype="int") self.partition_indices = [ - tensorlib.astensor(idx, dtype='int') for idx in self._partition_indices + tensorlib.astensor(idx, dtype="int") for idx in self._partition_indices ] if self.names: self.name_map = dict(zip(self.names, self.partition_indices)) @@ -45,9 +45,9 @@ def stitch(self, data): if len(tensorlib.shape(data)) == 1: stitched = tensorlib.gather(data, self.sorted_indices) else: - data = tensorlib.einsum('...j->j...', data) + data = tensorlib.einsum("...j->j...", data) stitched = tensorlib.gather(data, self.sorted_indices) - stitched = tensorlib.einsum('j...->...j', stitched) + stitched = tensorlib.einsum("j...->...j", stitched) return stitched def split(self, data, selection=None): @@ -59,9 +59,9 @@ def split(self, data, selection=None): ) if len(tensorlib.shape(data)) == 1: return [tensorlib.gather(data, idx) for idx in indices] - data = tensorlib.einsum('...j->j...', tensorlib.astensor(data)) + data = tensorlib.einsum("...j->j...", tensorlib.astensor(data)) return [ - tensorlib.einsum('j...->...j', tensorlib.gather(data, idx)) + tensorlib.einsum("j...->...j", tensorlib.gather(data, idx)) for idx in indices ] diff --git a/src/pyhf/tensor/jax_backend.py 
b/src/pyhf/tensor/jax_backend.py index 25685bb550..f0c69cde4e 100644 --- a/src/pyhf/tensor/jax_backend.py +++ b/src/pyhf/tensor/jax_backend.py @@ -1,6 +1,6 @@ from jax import config -config.update('jax_enable_x64', True) +config.update("jax_enable_x64", True) import logging @@ -61,7 +61,7 @@ def log_prob(self, value): class jax_backend: """JAX backend for pyhf""" - __slots__ = ['default_do_grad', 'dtypemap', 'name', 'precision'] + __slots__ = ["default_do_grad", "dtypemap", "name", "precision"] #: The array type for jax array_type = Array @@ -70,12 +70,12 @@ class jax_backend: array_subtype = Array def __init__(self, **kwargs): - self.name = 'jax' - self.precision = kwargs.get('precision', '64b') + self.name = "jax" + self.precision = kwargs.get("precision", "64b") self.dtypemap = { - 'float': jnp.float64 if self.precision == '64b' else jnp.float32, - 'int': jnp.int64 if self.precision == '64b' else jnp.int32, - 'bool': jnp.bool_, + "float": jnp.float64 if self.precision == "64b" else jnp.float32, + "int": jnp.int64 if self.precision == "64b" else jnp.int32, + "bool": jnp.bool_, } self.default_do_grad = True @@ -241,7 +241,7 @@ def astensor(self, tensor_in, dtype="float"): dtype = self.dtypemap[dtype] except KeyError: log.error( - 'Invalid dtype: dtype must be float, int, or bool.', exc_info=True + "Invalid dtype: dtype must be float, int, or bool.", exc_info=True ) raise diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py index a4a6ec3541..9c162e20b4 100644 --- a/src/pyhf/tensor/manager.py +++ b/src/pyhf/tensor/manager.py @@ -19,8 +19,8 @@ class HasState(Protocol): this: HasState = sys.modules[__name__] this.state = { - 'default': (None, None), # type: ignore[typeddict-item] - 'current': (None, None), # type: ignore[typeddict-item] + "default": (None, None), # type: ignore[typeddict-item] + "current": (None, None), # type: ignore[typeddict-item] } @@ -49,11 +49,11 @@ def get_backend(default: bool = False) -> tuple[TensorBackend, Optimizer]: _default_backend: TensorBackend = BackendRetriever.numpy_backend() _default_optimizer: Optimizer = OptimizerRetriever.scipy_optimizer() # type: ignore[no-untyped-call] -this.state['default'] = (_default_backend, _default_optimizer) -this.state['current'] = this.state['default'] +this.state["default"] = (_default_backend, _default_optimizer) +this.state["current"] = this.state["default"] -@events.register('change_backend') +@events.register("change_backend") def set_backend( backend: str | bytes | TensorBackend, custom_optimizer: str | bytes | Optimizer | None = None, @@ -157,25 +157,25 @@ def set_backend( # need to determine if the tensorlib changed or the optimizer changed for events tensorlib_changed = bool( - (new_backend.name != this.state['current'][0].name) - | (new_backend.precision != this.state['current'][0].precision) + (new_backend.name != this.state["current"][0].name) + | (new_backend.precision != this.state["current"][0].precision) ) - optimizer_changed = bool(this.state['current'][1] != new_optimizer) + optimizer_changed = bool(this.state["current"][1] != new_optimizer) # set new backend - this.state['current'] = (new_backend, new_optimizer) + this.state["current"] = (new_backend, new_optimizer) if default: default_tensorlib_changed = bool( - (new_backend.name != this.state['default'][0].name) - | (new_backend.precision != this.state['default'][0].precision) + (new_backend.name != this.state["default"][0].name) + | (new_backend.precision != this.state["default"][0].precision) ) - default_optimizer_changed = 
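# The name/precision comparison above means change events fire only on a
# real change; a hedged sketch of the public entry point:
import pyhf

pyhf.set_backend("numpy", precision="32b")  # "tensorlib_changed" fires
pyhf.set_backend("numpy", precision="32b")  # same backend and precision: no event
tensorlib, optimizer = pyhf.get_backend()
print(tensorlib.name, tensorlib.precision)  # numpy 32b
pyhf.set_backend("numpy", default=True)     # also updates the "default" state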
bool(this.state['default'][1] != new_optimizer) + default_optimizer_changed = bool(this.state["default"][1] != new_optimizer) # trigger events if default_tensorlib_changed: events.trigger("default_tensorlib_changed")() if default_optimizer_changed: events.trigger("default_optimizer_changed")() - this.state['default'] = this.state['current'] + this.state["default"] = this.state["current"] # trigger events if tensorlib_changed: diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index 0e437d2b75..5b4ddafd8f 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -63,7 +63,7 @@ def log_prob(self, value: Tensor[T]) -> ArrayLike: class numpy_backend(Generic[T]): """NumPy backend for pyhf""" - __slots__ = ['default_do_grad', 'dtypemap', 'name', 'precision'] + __slots__ = ["default_do_grad", "dtypemap", "name", "precision"] #: The array type for numpy array_type = np.ndarray @@ -78,9 +78,9 @@ def __init__(self, **kwargs: str): FloatIntOrBool, DTypeLike, # Type[np.floating[T]] | Type[np.integer[T]] | Type[np.bool_], ] = { - 'float': np.float64 if self.precision == '64b' else np.float32, - 'int': np.int64 if self.precision == '64b' else np.int32, - 'bool': np.bool_, + "float": np.float64 if self.precision == "64b" else np.float32, + "int": np.int64 if self.precision == "64b" else np.int32, + "bool": np.bool_, } self.default_do_grad: bool = False @@ -229,7 +229,7 @@ def isfinite(self, tensor: Tensor[T]) -> NDArray[np.bool_]: return np.isfinite(tensor) def astensor( - self, tensor_in: ArrayLike, dtype: FloatIntOrBool = 'float' + self, tensor_in: ArrayLike, dtype: FloatIntOrBool = "float" ) -> ArrayLike: """ Convert to a NumPy array. @@ -255,7 +255,7 @@ def astensor( dtype_obj = self.dtypemap[dtype] except KeyError: log.error( - 'Invalid dtype: dtype must be float, int, or bool.', exc_info=True + "Invalid dtype: dtype must be float, int, or bool.", exc_info=True ) raise @@ -355,7 +355,10 @@ def percentile( .. 
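# The dtypemap above resolves "float"/"int"/"bool" strings against the chosen
# precision; a small sketch:
import pyhf

backend = pyhf.tensor.numpy_backend(precision="32b")
print(backend.astensor([1, 2, 3], dtype="int").dtype)  # int32 (int64 under "64b")
# any other dtype string hits the KeyError branch above: the error is logged
# and the KeyError is re-raised to the caller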
versionadded:: 0.7.0 """ # see https://github.com/numpy/numpy/issues/22125 - return cast(ArrayLike, np.percentile(tensor_in, q, axis=axis, interpolation=interpolation)) # type: ignore[call-overload] + return cast( + ArrayLike, + np.percentile(tensor_in, q, axis=axis, interpolation=interpolation), + ) # type: ignore[call-overload] def stack(self, sequence: Sequence[Tensor[T]], axis: int = 0) -> ArrayLike: return np.stack(sequence, axis=axis) diff --git a/src/pyhf/typing.py b/src/pyhf/typing.py index 1a722d404b..5ec5acb8cc 100644 --- a/src/pyhf/typing.py +++ b/src/pyhf/typing.py @@ -71,12 +71,12 @@ class NormSysData(TypedDict): class NormSys(ModifierBase): - type: Literal['normsys'] + type: Literal["normsys"] data: NormSysData class NormFactor(ModifierBase): - type: Literal['normfactor'] + type: Literal["normfactor"] data: None @@ -86,28 +86,28 @@ class HistoSysData(TypedDict): class HistoSys(ModifierBase): - type: Literal['histosys'] + type: Literal["histosys"] data: HistoSysData class StatError(ModifierBase): - type: Literal['staterror'] + type: Literal["staterror"] data: Sequence[float] class ShapeSys(ModifierBase): - type: Literal['shapesys'] + type: Literal["shapesys"] data: Sequence[float] class ShapeFactor(ModifierBase): - type: Literal['shapefactor'] + type: Literal["shapefactor"] data: None class LumiSys(TypedDict): - name: Literal['lumi'] - type: Literal['lumi'] + name: Literal["lumi"] + type: Literal["lumi"] data: None diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index fbf231b70b..8a10e411a2 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -19,30 +19,30 @@ def __dir__(): def options_from_eqdelimstring(opts): - document = '\n'.join( + document = "\n".join( f"{opt.split('=', 1)[0]}: {opt.split('=', 1)[1]}" for opt in opts ) return yaml.safe_load(document) class EqDelimStringParamType(click.ParamType): - name = 'equal-delimited option' + name = "equal-delimited option" def convert(self, value, param, ctx): try: return options_from_eqdelimstring([value]) except IndexError: - self.fail(f'{value:s} is not a valid equal-delimited string', param, ctx) + self.fail(f"{value:s} is not a valid equal-delimited string", param, ctx) class VolumeMountPath(click.Path): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.name = f'{self.name}:{gettext("path")}' + self.name = f"{self.name}:{gettext('path')}" def convert(self, value, param, ctx): try: - path_host, path_mount = value.split(':') + path_host, path_mount = value.split(":") except ValueError: # too many values to unpack / not enough values to unpack self.fail(f"{value!r} is not a valid colon-separated option", param, ctx) @@ -53,7 +53,7 @@ def convert(self, value, param, ctx): ) -def digest(obj, algorithm='sha256'): +def digest(obj, algorithm="sha256"): """ Get the digest for the provided object. Note: object must be JSON-serializable. @@ -82,7 +82,7 @@ def digest(obj, algorithm='sha256'): """ try: - stringified = json.dumps(obj, sort_keys=True, ensure_ascii=False).encode('utf8') + stringified = json.dumps(obj, sort_keys=True, ensure_ascii=False).encode("utf8") except TypeError: raise ValueError( "The supplied object is not JSON-serializable for calculating a hash." 
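# options_from_eqdelimstring above round-trips key=value strings through
# YAML, so values come back typed; digest() then hashes the sorted JSON:
from pyhf.utils import digest, options_from_eqdelimstring

opts = options_from_eqdelimstring(["maxiter=1000", "do_grad=true"])
print(opts)          # {'maxiter': 1000, 'do_grad': True}
print(digest(opts))  # stable sha256 hex digest; non-JSON input raises ValueError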
@@ -112,10 +112,10 @@ def citation(oneline=False): Returns: citation (:obj:`str`): The citation for this software """ - ref = resources.files('pyhf') / 'data' / 'citation.bib' + ref = resources.files("pyhf") / "data" / "citation.bib" with resources.as_file(ref) as path: data = path.read_text().strip() if oneline: - data = ''.join(data.splitlines()) + data = "".join(data.splitlines()) return data diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py index 215b38cb42..8ce2252cd3 100644 --- a/src/pyhf/workspace.py +++ b/src/pyhf/workspace.py @@ -30,7 +30,7 @@ def __dir__(): return __all__ -def _join_items(join, left_items, right_items, key='name', deep_merge_key=None): +def _join_items(join, left_items, right_items, key="name", deep_merge_key=None): """ Join two lists of dictionaries along the given key. @@ -46,7 +46,7 @@ def _join_items(join, left_items, right_items, key='name', deep_merge_key=None): :obj:`list`: A joined list of dictionaries. """ - if join == 'right outer': + if join == "right outer": primary_items, secondary_items = right_items, left_items else: primary_items, secondary_items = left_items, right_items @@ -60,7 +60,7 @@ def _join_items(join, left_items, right_items, key='name', deep_merge_key=None): ] _deep_right_items = secondary_item[deep_merge_key] joined_items[keys.index(secondary_item[key])][deep_merge_key] = _join_items( - 'left outer', _deep_left_items, _deep_right_items + "left outer", _deep_left_items, _deep_right_items ) # next, move over whole items where possible: # - if no join logic @@ -69,10 +69,10 @@ def _join_items(join, left_items, right_items, key='name', deep_merge_key=None): # - if right outer join and item (by name) is on left and not in right # NB: this will be slow for large numbers of items elif ( - join == 'none' - or (join in ['outer'] and secondary_item not in primary_items) + join == "none" + or (join in ["outer"] and secondary_item not in primary_items) or ( - join in ['left outer', 'right outer'] + join in ["left outer", "right outer"] and secondary_item[key] not in keys ) ): @@ -122,20 +122,20 @@ def _join_channels(join, left_channels, right_channels, merge=False): """ joined_channels = _join_items( - join, left_channels, right_channels, deep_merge_key='samples' if merge else None + join, left_channels, right_channels, deep_merge_key="samples" if merge else None ) - if join == 'none': - common_channels = {c['name'] for c in left_channels}.intersection( - c['name'] for c in right_channels + if join == "none": + common_channels = {c["name"] for c in left_channels}.intersection( + c["name"] for c in right_channels ) if common_channels: raise exceptions.InvalidWorkspaceOperation( f"Workspaces cannot have any channels in common with the same name: {common_channels}. You can also try a different join operation: {Workspace.valid_joins}." 
) - elif join == 'outer': + elif join == "outer": counted_channels = collections.Counter( - channel['name'] for channel in joined_channels + channel["name"] for channel in joined_channels ) incompatible_channels = [ channel for channel, count in counted_channels.items() if count > 1 @@ -164,18 +164,18 @@ def _join_observations(join, left_observations, right_observations): """ joined_observations = _join_items(join, left_observations, right_observations) - if join == 'none': - common_observations = {obs['name'] for obs in left_observations}.intersection( - obs['name'] for obs in right_observations + if join == "none": + common_observations = {obs["name"] for obs in left_observations}.intersection( + obs["name"] for obs in right_observations ) if common_observations: raise exceptions.InvalidWorkspaceOperation( f"Workspaces cannot have any observations in common with the same name: {common_observations}. You can also try a different join operation: {Workspace.valid_joins}." ) - elif join == 'outer': + elif join == "outer": counted_observations = collections.Counter( - observation['name'] for observation in joined_observations + observation["name"] for observation in joined_observations ) incompatible_observations = [ observation @@ -207,9 +207,9 @@ def _join_parameter_configs(measurement_name, left_parameters, right_parameters) :obj:`list`: A joined list of parameter configurations. Each parameter configuration follows the :obj:`defs.json#/definitions/config` schema """ - joined_parameter_configs = _join_items('outer', left_parameters, right_parameters) + joined_parameter_configs = _join_items("outer", left_parameters, right_parameters) counted_parameter_configs = collections.Counter( - parameter['name'] for parameter in joined_parameter_configs + parameter["name"] for parameter in joined_parameter_configs ) incompatible_parameter_configs = [ parameter for parameter, count in counted_parameter_configs.items() if count > 1 @@ -238,26 +238,26 @@ def _join_measurements(join, left_measurements, right_measurements): """ joined_measurements = _join_items(join, left_measurements, right_measurements) - if join == 'none': - common_measurements = {meas['name'] for meas in left_measurements}.intersection( - meas['name'] for meas in right_measurements + if join == "none": + common_measurements = {meas["name"] for meas in left_measurements}.intersection( + meas["name"] for meas in right_measurements ) if common_measurements: raise exceptions.InvalidWorkspaceOperation( f"Workspaces cannot have any measurements in common with the same name: {common_measurements}. You can also try a different join operation: {Workspace.valid_joins}." 
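# The three join families above, summarized as a hedged sketch (ws_left and
# ws_right are assumed Workspace objects):
import pyhf

combined = pyhf.Workspace.combine(ws_left, ws_right, join="outer")
# join="none": any shared channel/observation/measurement name raises
#   InvalidWorkspaceOperation
# join="outer": shared names are allowed only if the items are identical
# join="left outer"/"right outer": the named side wins conflicts (and a
#   warning flags the join as unsafe)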
) - elif join == 'outer': + elif join == "outer": # need to store a mapping of measurement name to all measurement objects with that name _measurement_mapping = {} for measurement in joined_measurements: - _measurement_mapping.setdefault(measurement['name'], []).append(measurement) + _measurement_mapping.setdefault(measurement["name"], []).append(measurement) # first check for incompatible POI # then merge parameter configs incompatible_poi = [ measurement_name for measurement_name, measurements in _measurement_mapping.items() - if len({measurement['config']['poi'] for measurement in measurements}) > 1 + if len({measurement["config"]["poi"] for measurement in measurements}) > 1 ] if incompatible_poi: raise exceptions.InvalidWorkspaceOperation( @@ -268,13 +268,13 @@ def _join_measurements(join, left_measurements, right_measurements): for measurement_name, measurements in _measurement_mapping.items(): if len(measurements) != 1: new_measurement = { - 'name': measurement_name, - 'config': { - 'poi': measurements[0]['config']['poi'], - 'parameters': _join_parameter_configs( + "name": measurement_name, + "config": { + "poi": measurements[0]["config"]["poi"], + "parameters": _join_parameter_configs( measurement_name, *( - measurement['config']['parameters'] + measurement["config"]["parameters"] for measurement in measurements ), ), @@ -291,7 +291,7 @@ class Workspace(_ChannelSummaryMixin, dict): A JSON-serializable object that is built from an object that follows the :obj:`workspace.json` `schema `__. """ - valid_joins: ClassVar[list[str]] = ['none', 'outer', 'left outer', 'right outer'] + valid_joins: ClassVar[list[str]] = ["none", "outer", "left outer", "right outer"] def __init__(self, spec, validate: bool = True, **config_kwargs): """ @@ -307,23 +307,23 @@ def __init__(self, spec, validate: bool = True, **config_kwargs): """ spec = copy.deepcopy(spec) - self.schema = config_kwargs.pop('schema', 'workspace.json') - self.version = config_kwargs.pop('version', spec.get('version', None)) + self.schema = config_kwargs.pop("schema", "workspace.json") + self.version = config_kwargs.pop("version", spec.get("version", None)) # run jsonschema validation of input specification against the (provided) schema if validate: log.info(f"Validating spec against schema: {self.schema}") schema.validate(spec, self.schema, version=self.version) - super().__init__(spec, channels=spec['channels']) + super().__init__(spec, channels=spec["channels"]) self.measurement_names = [] - for measurement in self.get('measurements', []): - self.measurement_names.append(measurement['name']) + for measurement in self.get("measurements", []): + self.measurement_names.append(measurement["name"]) self.observations = {} - for obs in self['observations']: - self.observations[obs['name']] = obs['data'] + for obs in self["observations"]: + self.observations[obs["name"]] = obs["data"] if config_kwargs: raise exceptions.Unsupported( @@ -371,22 +371,22 @@ def get_measurement(self, measurement_name=None, measurement_index=None): if measurement_name not in self.measurement_names: log.debug(f"measurements defined: {self.measurement_names}") raise exceptions.InvalidMeasurement( - f'no measurement by name \'{measurement_name:s}\' was found in the workspace, pick from one of the valid ones above' + f"no measurement by name '{measurement_name:s}' was found in the workspace, pick from one of the valid ones above" ) - measurement = self['measurements'][ + measurement = self["measurements"][ self.measurement_names.index(measurement_name) ] else: if 
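# Workspace construction above indexes measurements and observations by name;
# a hedged sketch, assuming "spec" is a document valid against workspace.json:
import pyhf

ws = pyhf.Workspace(spec)
print(ws.measurement_names)   # one entry per measurement in the spec
print(list(ws.observations))  # observed data keyed by channel name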
measurement_index is None and len(self.measurement_names) > 1: log.warning( - 'multiple measurements defined. Taking the first measurement.' + "multiple measurements defined. Taking the first measurement." ) measurement_index = ( measurement_index if measurement_index is not None else 0 ) try: - measurement = self['measurements'][measurement_index] + measurement = self["measurements"][measurement_index] except IndexError: raise exceptions.InvalidMeasurement( f"The measurement index {measurement_index} is out of bounds as only {len(self.measurement_names)} measurement(s) have been defined." @@ -394,7 +394,7 @@ def get_measurement(self, measurement_name=None, measurement_index=None): else: raise exceptions.InvalidMeasurement("No measurements have been defined.") - schema.validate(measurement, 'measurement.json', version=self.version) + schema.validate(measurement, "measurement.json", version=self.version) return measurement def model( @@ -434,13 +434,13 @@ def model( ) # set poi_name if the user does not provide it - config_kwargs.setdefault('poi_name', measurement['config']['poi']) + config_kwargs.setdefault("poi_name", measurement["config"]["poi"]) log.debug(f"model being created for measurement {measurement['name']:s}") modelspec = { - 'channels': self['channels'], - 'parameters': measurement['config']['parameters'], + "channels": self["channels"], + "parameters": measurement["config"]["parameters"], } patches = patches or [] @@ -571,65 +571,65 @@ def _prune_and_rename( ) newspec = { - 'channels': [ + "channels": [ { - 'name': rename_channels.get(channel['name'], channel['name']), - 'samples': [ + "name": rename_channels.get(channel["name"], channel["name"]), + "samples": [ { - 'name': rename_samples.get(sample['name'], sample['name']), - 'data': sample['data'], - 'modifiers': [ + "name": rename_samples.get(sample["name"], sample["name"]), + "data": sample["data"], + "modifiers": [ dict( modifier, name=rename_modifiers.get( - modifier['name'], modifier['name'] + modifier["name"], modifier["name"] ), ) - for modifier in sample['modifiers'] - if modifier['name'] not in prune_modifiers - and modifier['type'] not in prune_modifier_types + for modifier in sample["modifiers"] + if modifier["name"] not in prune_modifiers + and modifier["type"] not in prune_modifier_types ], } - for sample in channel['samples'] - if sample['name'] not in prune_samples + for sample in channel["samples"] + if sample["name"] not in prune_samples ], } - for channel in self['channels'] - if channel['name'] not in prune_channels + for channel in self["channels"] + if channel["name"] not in prune_channels ], - 'measurements': [ + "measurements": [ { - 'name': rename_measurements.get( - measurement['name'], measurement['name'] + "name": rename_measurements.get( + measurement["name"], measurement["name"] ), - 'config': { - 'parameters': [ + "config": { + "parameters": [ dict( parameter, name=rename_modifiers.get( - parameter['name'], parameter['name'] + parameter["name"], parameter["name"] ), ) - for parameter in measurement['config']['parameters'] - if parameter['name'] not in prune_modifiers + for parameter in measurement["config"]["parameters"] + if parameter["name"] not in prune_modifiers ], - 'poi': rename_modifiers.get( - measurement['config']['poi'], measurement['config']['poi'] + "poi": rename_modifiers.get( + measurement["config"]["poi"], measurement["config"]["poi"] ), }, } - for measurement in self['measurements'] - if measurement['name'] not in prune_measurements + for measurement in self["measurements"] + if 
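# get_measurement() and model() above combine as follows; a hedged sketch
# assuming "ws" holds a measurement named "meas":
model = ws.model(measurement_name="meas")
# with no arguments the first measurement is taken (with a warning if there
# are several); the returned pyhf.pdf.Model gets poi_name from the
# measurement's POI unless overridden via config_kwargs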
measurement["name"] not in prune_measurements ], - 'observations': [ + "observations": [ dict( copy.deepcopy(observation), - name=rename_channels.get(observation['name'], observation['name']), + name=rename_channels.get(observation["name"], observation["name"]), ) - for observation in self['observations'] - if observation['name'] not in prune_channels + for observation in self["observations"] + if observation["name"] not in prune_channels ], - 'version': self['version'], + "version": self["version"], } return Workspace(newspec) @@ -710,7 +710,7 @@ def rename(self, modifiers=None, samples=None, channels=None, measurements=None) @classmethod def combine( - cls, left, right, join='none', merge_channels=False, validate: bool = True + cls, left, right, join="none", merge_channels=False, validate: bool = True ): """ Return a new workspace specification that is the combination of the two workspaces. @@ -749,32 +749,32 @@ def combine( f"Workspaces must be joined using one of the valid join operations ({Workspace.valid_joins}); not {join}" ) - if merge_channels and join not in ['outer', 'left outer', 'right outer']: + if merge_channels and join not in ["outer", "left outer", "right outer"]: raise ValueError( f"You can only merge channels using the 'outer', 'left outer', or 'right outer' join operations; not {join}" ) - if join in ['left outer', 'right outer']: + if join in ["left outer", "right outer"]: log.warning( "You are using an unsafe join operation. This will silence exceptions that might be raised during a normal 'outer' operation." ) - new_version = _join_versions(join, left['version'], right['version']) + new_version = _join_versions(join, left["version"], right["version"]) new_channels = _join_channels( - join, left['channels'], right['channels'], merge=merge_channels + join, left["channels"], right["channels"], merge=merge_channels ) new_observations = _join_observations( - join, left['observations'], right['observations'] + join, left["observations"], right["observations"] ) new_measurements = _join_measurements( - join, left['measurements'], right['measurements'] + join, left["measurements"], right["measurements"] ) newspec = { - 'channels': new_channels, - 'measurements': new_measurements, - 'observations': new_observations, - 'version': new_version, + "channels": new_channels, + "measurements": new_measurements, + "observations": new_observations, + "version": new_version, } return cls(newspec, validate=validate) @@ -792,22 +792,22 @@ def sorted(cls, workspace): """ newspec = copy.deepcopy(dict(workspace)) - newspec['channels'].sort(key=lambda e: e['name']) - for channel in newspec['channels']: - channel['samples'].sort(key=lambda e: e['name']) - for sample in channel['samples']: - sample['modifiers'].sort(key=lambda e: (e['name'], e['type'])) + newspec["channels"].sort(key=lambda e: e["name"]) + for channel in newspec["channels"]: + channel["samples"].sort(key=lambda e: e["name"]) + for sample in channel["samples"]: + sample["modifiers"].sort(key=lambda e: (e["name"], e["type"])) - newspec['measurements'].sort(key=lambda e: e['name']) - for measurement in newspec['measurements']: - measurement['config']['parameters'].sort(key=lambda e: e['name']) + newspec["measurements"].sort(key=lambda e: e["name"]) + for measurement in newspec["measurements"]: + measurement["config"]["parameters"].sort(key=lambda e: e["name"]) - newspec['observations'].sort(key=lambda e: e['name']) + newspec["observations"].sort(key=lambda e: e["name"]) return cls(newspec) @classmethod - def build(cls, model, 
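# The comprehension above backs both prune() and rename(); a hedged usage
# sketch with illustrative names ("ws" is an assumed Workspace):
slimmed = ws.prune(modifiers=["lumi"], channels=["control_region"])
renamed = ws.rename(
    channels={"signal_region": "SR"},
    modifiers={"uncorr_bkguncrt": "bkg_stat"},
)
# both return new Workspace objects built from newspec; "ws" is unchanged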
data, name='measurement', validate: bool = True): + def build(cls, model, data, name="measurement", validate: bool = True): """ Build a workspace from model and data. @@ -821,21 +821,21 @@ def build(cls, model, data, name='measurement', validate: bool = True): ~pyhf.workspace.Workspace: A new workspace object """ - workspace = copy.deepcopy(dict(channels=model.spec['channels'])) - workspace['version'] = schema.version - workspace['measurements'] = [ + workspace = copy.deepcopy(dict(channels=model.spec["channels"])) + workspace["version"] = schema.version + workspace["measurements"] = [ { - 'name': name, - 'config': { - 'poi': model.config.poi_name, - 'parameters': [ + "name": name, + "config": { + "poi": model.config.poi_name, + "parameters": [ { "bounds": [ list(x) - for x in parset_spec['paramset'].suggested_bounds + for x in parset_spec["paramset"].suggested_bounds ], - "inits": parset_spec['paramset'].suggested_init, - "fixed": parset_spec['paramset'].suggested_fixed_as_bool, + "inits": parset_spec["paramset"].suggested_init, + "fixed": parset_spec["paramset"].suggested_fixed_as_bool, "name": parset_name, } for parset_name, parset_spec in model.config.par_map.items() @@ -843,8 +843,8 @@ def build(cls, model, data, name='measurement', validate: bool = True): }, } ] - workspace['observations'] = [ - {'name': k, 'data': list(data[model.config.channel_slices[k]])} + workspace["observations"] = [ + {"name": k, "data": list(data[model.config.channel_slices[k]])} for k in model.config.channels ] return cls(workspace, validate=validate) diff --git a/src/pyhf/writexml.py b/src/pyhf/writexml.py index b419c8e394..349947f562 100644 --- a/src/pyhf/writexml.py +++ b/src/pyhf/writexml.py @@ -39,8 +39,8 @@ def __dir__(): # Therefore, 'spec' needs to be threaded through all these calls. 
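# Workspace.build() above inverts model() + data; a hedged round-trip sketch:
import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[5.0], bkg=[40.0], bkg_uncertainty=[4.0]
)
data = [44.0] + model.config.auxdata
ws = pyhf.Workspace.build(model, data, name="measurement")
rebuilt = pyhf.Workspace.sorted(ws).model()
assert rebuilt.config.npars == model.config.npars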
-def _make_hist_name(channel, sample, modifier='', prefix='hist', suffix=''): - middle = '_'.join(filter(lambda x: x, [channel, sample, modifier])) +def _make_hist_name(channel, sample, modifier="", prefix="hist", suffix=""): + middle = "_".join(filter(lambda x: x, [channel, sample, modifier])) return f"{prefix}{middle}{suffix}" @@ -83,33 +83,33 @@ def build_measurement(measurementspec, modifiertypes): """ # need to determine prefixes prefixes = { - 'normsys': 'alpha_', - 'histosys': 'alpha_', - 'shapesys': 'gamma_', - 'staterror': 'gamma_', + "normsys": "alpha_", + "histosys": "alpha_", + "shapesys": "gamma_", + "staterror": "gamma_", } - config = measurementspec['config'] - name = measurementspec['name'] - poi = config['poi'] + config = measurementspec["config"] + name = measurementspec["name"] + poi = config["poi"] # we want to know which parameters are fixed (constant) # and to additionally extract the luminosity information fixed_params = [] lumi = 1.0 lumierr = 0.0 - for parameter in config['parameters']: - if parameter.get('fixed', False): - pname = parameter['name'] - if pname == 'lumi': - fixed_params.append('Lumi') + for parameter in config["parameters"]: + if parameter.get("fixed", False): + pname = parameter["name"] + if pname == "lumi": + fixed_params.append("Lumi") else: - prefix = prefixes.get(modifiertypes[pname], '') - fixed_params.append(f'{prefix}{pname}') + prefix = prefixes.get(modifiertypes[pname], "") + fixed_params.append(f"{prefix}{pname}") # we found luminosity, so handle it - if parameter['name'] == 'lumi': - lumi = parameter['auxdata'][0] - lumierr = parameter['sigmas'][0] + if parameter["name"] == "lumi": + lumi = parameter["auxdata"][0] + lumierr = parameter["sigmas"][0] # define measurement meas = ET.Element( @@ -119,44 +119,44 @@ def build_measurement(measurementspec, modifiertypes): LumiRelErr=str(lumierr), ExportOnly=str(True), ) - poiel = ET.Element('POI') + poiel = ET.Element("POI") poiel.text = poi meas.append(poiel) # add fixed parameters (constant) if fixed_params: - se = ET.Element('ParamSetting', Const='True') - se.text = ' '.join(fixed_params) + se = ET.Element("ParamSetting", Const="True") + se.text = " ".join(fixed_params) meas.append(se) return meas def build_modifier(spec, modifierspec, channelname, samplename, sampledata): - if modifierspec['name'] == 'lumi': + if modifierspec["name"] == "lumi": return None mod_map = { - 'histosys': 'HistoSys', - 'staterror': 'StatError', - 'normsys': 'OverallSys', - 'shapesys': 'ShapeSys', - 'normfactor': 'NormFactor', - 'shapefactor': 'ShapeFactor', + "histosys": "HistoSys", + "staterror": "StatError", + "normsys": "OverallSys", + "shapesys": "ShapeSys", + "normfactor": "NormFactor", + "shapefactor": "ShapeFactor", } - attrs = {'Name': modifierspec['name']} - if modifierspec['type'] == 'histosys': - attrs['HistoNameLow'] = _make_hist_name( - channelname, samplename, modifierspec['name'], suffix='Low' + attrs = {"Name": modifierspec["name"]} + if modifierspec["type"] == "histosys": + attrs["HistoNameLow"] = _make_hist_name( + channelname, samplename, modifierspec["name"], suffix="Low" ) - attrs['HistoNameHigh'] = _make_hist_name( - channelname, samplename, modifierspec['name'], suffix='High' + attrs["HistoNameHigh"] = _make_hist_name( + channelname, samplename, modifierspec["name"], suffix="High" ) - _export_root_histogram(attrs['HistoNameLow'], modifierspec['data']['lo_data']) - _export_root_histogram(attrs['HistoNameHigh'], modifierspec['data']['hi_data']) - elif modifierspec['type'] == 'normsys': - 
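# The histogram naming scheme above, worked through (importing the private
# helper just for illustration):
from pyhf.writexml import _make_hist_name

assert _make_hist_name("SR", "signal") == "histSR_signal"
assert _make_hist_name("SR", "bkg", "syst", suffix="Low") == "histSR_bkg_systLow"
# build_measurement then prefixes parameters by modifier type:
# normsys/histosys -> "alpha_", shapesys/staterror -> "gamma_", lumi -> "Lumi"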
attrs['High'] = str(modifierspec['data']['hi']) - attrs['Low'] = str(modifierspec['data']['lo']) - elif modifierspec['type'] == 'normfactor': + _export_root_histogram(attrs["HistoNameLow"], modifierspec["data"]["lo_data"]) + _export_root_histogram(attrs["HistoNameHigh"], modifierspec["data"]["hi_data"]) + elif modifierspec["type"] == "normsys": + attrs["High"] = str(modifierspec["data"]["hi"]) + attrs["Low"] = str(modifierspec["data"]["lo"]) + elif modifierspec["type"] == "normfactor": # NB: only look at first measurement for normfactor configs. In order # to dump as HistFactory XML, this has to be the same for all # measurements or it will not work correctly. Why? @@ -175,49 +175,49 @@ def build_modifier(spec, modifierspec, channelname, samplename, sampledata): val = 1 low = 0 high = 10 - for p in spec['measurements'][0]['config']['parameters']: - if p['name'] == modifierspec['name']: - val = p.get('inits', [val])[0] - low, high = p.get('bounds', [[low, high]])[0] - attrs['Val'] = str(val) - attrs['Low'] = str(low) - attrs['High'] = str(high) - elif modifierspec['type'] == 'staterror': - attrs['Activate'] = 'True' - attrs['HistoName'] = _make_hist_name( - channelname, samplename, modifierspec['name'] + for p in spec["measurements"][0]["config"]["parameters"]: + if p["name"] == modifierspec["name"]: + val = p.get("inits", [val])[0] + low, high = p.get("bounds", [[low, high]])[0] + attrs["Val"] = str(val) + attrs["Low"] = str(low) + attrs["High"] = str(high) + elif modifierspec["type"] == "staterror": + attrs["Activate"] = "True" + attrs["HistoName"] = _make_hist_name( + channelname, samplename, modifierspec["name"] ) # must be deleted, HiFa XML specification does not support 'Name' - del attrs['Name'] + del attrs["Name"] # need to make this a relative uncertainty stored in ROOT file _export_root_histogram( - attrs['HistoName'], + attrs["HistoName"], np.divide( - modifierspec['data'], + modifierspec["data"], sampledata, out=np.zeros_like(sampledata), where=np.asarray(sampledata) != 0, - dtype='float', + dtype="float", ).tolist(), ) - elif modifierspec['type'] == 'shapesys': - attrs['ConstraintType'] = 'Poisson' - attrs['HistoName'] = _make_hist_name( - channelname, samplename, modifierspec['name'] + elif modifierspec["type"] == "shapesys": + attrs["ConstraintType"] = "Poisson" + attrs["HistoName"] = _make_hist_name( + channelname, samplename, modifierspec["name"] ) # need to make this a relative uncertainty stored in ROOT file _export_root_histogram( - attrs['HistoName'], + attrs["HistoName"], [ np.divide( - a, b, out=np.zeros_like(a), where=np.asarray(b) != 0, dtype='float' + a, b, out=np.zeros_like(a), where=np.asarray(b) != 0, dtype="float" ) for a, b in np.array( - (modifierspec['data'], sampledata), dtype="float" + (modifierspec["data"], sampledata), dtype="float" ).T ], ) - elif modifierspec['type'] == 'shapefactor': + elif modifierspec["type"] == "shapefactor": pass else: log.warning( @@ -225,50 +225,50 @@ def build_modifier(spec, modifierspec, channelname, samplename, sampledata): ) return None - modifier = ET.Element(mod_map[modifierspec['type']], **attrs) + modifier = ET.Element(mod_map[modifierspec["type"]], **attrs) return modifier def build_sample(spec, samplespec, channelname): - histname = _make_hist_name(channelname, samplespec['name']) + histname = _make_hist_name(channelname, samplespec["name"]) attrs = { - 'Name': samplespec['name'], - 'HistoName': histname, - 'InputFile': _ROOT_DATA_FILE.file_path, - 'NormalizeByTheory': 'False', + "Name": samplespec["name"], + 
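# The np.divide(..., where=...) pattern above turns absolute JSON
# uncertainties back into the relative ones HistFactory XML stores, while
# zero-content bins stay at 0 rather than dividing by zero:
import numpy as np

absolute = np.asarray([5.0, 2.6, 1.0])
nominal = np.asarray([50.0, 52.0, 0.0])
relative = np.divide(
    absolute, nominal, out=np.zeros_like(absolute), where=nominal != 0
)
print(relative.tolist())  # [0.1, 0.05, 0.0]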
"HistoName": histname, + "InputFile": _ROOT_DATA_FILE.file_path, + "NormalizeByTheory": "False", } - sample = ET.Element('Sample', **attrs) - for modspec in samplespec['modifiers']: + sample = ET.Element("Sample", **attrs) + for modspec in samplespec["modifiers"]: # if lumi modifier added for this sample, need to set NormalizeByTheory - if modspec['type'] == 'lumi': - sample.attrib.update({'NormalizeByTheory': 'True'}) + if modspec["type"] == "lumi": + sample.attrib.update({"NormalizeByTheory": "True"}) modifier = build_modifier( - spec, modspec, channelname, samplespec['name'], samplespec['data'] + spec, modspec, channelname, samplespec["name"], samplespec["data"] ) if modifier is not None: sample.append(modifier) - _export_root_histogram(histname, samplespec['data']) + _export_root_histogram(histname, samplespec["data"]) return sample def build_data(obsspec, channelname): - histname = _make_hist_name(channelname, 'data') - data = ET.Element('Data', HistoName=histname, InputFile=_ROOT_DATA_FILE.file_path) + histname = _make_hist_name(channelname, "data") + data = ET.Element("Data", HistoName=histname, InputFile=_ROOT_DATA_FILE.file_path) - observation = next((obs for obs in obsspec if obs['name'] == channelname), None) - _export_root_histogram(histname, observation['data']) + observation = next((obs for obs in obsspec if obs["name"] == channelname), None) + _export_root_histogram(histname, observation["data"]) return data def build_channel(spec, channelspec, obsspec): channel = ET.Element( - 'Channel', Name=channelspec['name'], InputFile=_ROOT_DATA_FILE.file_path + "Channel", Name=channelspec["name"], InputFile=_ROOT_DATA_FILE.file_path ) if obsspec: - data = build_data(obsspec, channelspec['name']) + data = build_data(obsspec, channelspec["name"]) channel.append(data) - for samplespec in channelspec['samples']: - channel.append(build_sample(spec, samplespec, channelspec['name'])) + for samplespec in channelspec["samples"]: + channel.append(build_sample(spec, samplespec, channelspec["name"])) return channel @@ -276,26 +276,26 @@ def writexml(spec, specdir, data_rootdir, resultprefix): global _ROOT_DATA_FILE shutil.copyfile( - schema_path.joinpath('HistFactorySchema.dtd'), - Path(specdir).parent.joinpath('HistFactorySchema.dtd'), + schema_path.joinpath("HistFactorySchema.dtd"), + Path(specdir).parent.joinpath("HistFactorySchema.dtd"), ) combination = ET.Element( "Combination", OutputFilePrefix=str(Path(specdir).joinpath(resultprefix)) ) - with uproot.recreate(Path(data_rootdir).joinpath('data.root')) as _ROOT_DATA_FILE: - for channelspec in spec['channels']: + with uproot.recreate(Path(data_rootdir).joinpath("data.root")) as _ROOT_DATA_FILE: + for channelspec in spec["channels"]: channelfilename = str( - Path(specdir).joinpath(f'{resultprefix}_{channelspec["name"]}.xml') + Path(specdir).joinpath(f"{resultprefix}_{channelspec['name']}.xml") ) with open(channelfilename, "w", encoding="utf-8") as channelfile: - channel = build_channel(spec, channelspec, spec.get('observations')) + channel = build_channel(spec, channelspec, spec.get("observations")) indent(channel) channelfile.write( "\n\n" ) channelfile.write( - ET.tostring(channel, encoding='utf-8').decode('utf-8') + ET.tostring(channel, encoding="utf-8").decode("utf-8") ) inp = ET.Element("Input") @@ -303,11 +303,11 @@ def writexml(spec, specdir, data_rootdir, resultprefix): combination.append(inp) # need information about modifier types to get the right prefix in measurement - mixin = _ChannelSummaryMixin(channels=spec['channels']) + mixin 
= _ChannelSummaryMixin(channels=spec["channels"]) - for measurement in spec['measurements']: + for measurement in spec["measurements"]: combination.append(build_measurement(measurement, dict(mixin.modifiers))) indent(combination) return b"\n\n" + ET.tostring( - combination, encoding='utf-8' + combination, encoding="utf-8" ) diff --git a/tests/benchmarks/test_benchmark.py b/tests/benchmarks/test_benchmark.py index 31399ec6a6..e8a21b9f04 100644 --- a/tests/benchmarks/test_benchmark.py +++ b/tests/benchmarks/test_benchmark.py @@ -22,8 +22,8 @@ def generate_source_static(n_bins): sig = [30.0] * n_bins source = { - 'binning': binning, - 'bindata': {'data': data, 'bkg': bkg, 'bkgerr': bkgerr, 'sig': sig}, + "binning": binning, + "bindata": {"data": data, "bkg": bkg, "bkgerr": bkgerr, "sig": sig}, } return source @@ -47,8 +47,8 @@ def generate_source_poisson(n_bins): sig = np.random.poisson(30.0, n_bins).tolist() source = { - 'binning': binning, - 'bindata': {'data': data, 'bkg': bkg, 'bkgerr': bkgerr, 'sig': sig}, + "binning": binning, + "bindata": {"data": data, "bkg": bkg, "bkgerr": bkgerr, "sig": sig}, } return source @@ -67,10 +67,10 @@ def hypotest(pdf, data): bins = [1, 10, 50, 100, 200] -bin_ids = [f'{n_bins}_bins' for n_bins in bins] +bin_ids = [f"{n_bins}_bins" for n_bins in bins] -@pytest.mark.parametrize('n_bins', bins, ids=bin_ids) +@pytest.mark.parametrize("n_bins", bins, ids=bin_ids) def test_hypotest(benchmark, backend, n_bins): """ Benchmark the performance of pyhf.utils.hypotest() @@ -86,7 +86,7 @@ def test_hypotest(benchmark, backend, n_bins): """ source = generate_source_static(n_bins) pdf = uncorrelated_background( - source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr'] + source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"] ) - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata assert benchmark(hypotest, pdf, data) diff --git a/tests/conftest.py b/tests/conftest.py index 7b7919f209..d840f21824 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -40,7 +40,7 @@ def _get_json_from_tarfile(archive_data_path, json_name): return _get_json_from_tarfile -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def isolate_modules(): """ This fixture isolates the sys.modules imported in case you need to mess around with them and do not want to break other tests. @@ -52,7 +52,7 @@ def isolate_modules(): sys.modules.update(CACHE_MODULES) -@pytest.fixture(scope='function', autouse=True) +@pytest.fixture(scope="function", autouse=True) def reset_events(): """ This fixture is automatically run to clear out the events registered before and after a test function runs. @@ -64,18 +64,18 @@ def reset_events(): pyhf.events.__disabled_events.clear() -@pytest.fixture(scope='function', autouse=True) +@pytest.fixture(scope="function", autouse=True) def reset_backend(): """ This fixture is automatically run to reset the backend before and after a test function runs. 
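# Driving the writer above end to end, as a hedged sketch ("ws" is an
# assumed Workspace and the target directories must already exist):
import pyhf.writexml

combination_xml = pyhf.writexml.writexml(
    dict(ws), "specdir", "data_rootdir", "FitConfig"
)
# writes one FitConfig_<channel>.xml per channel plus data.root under
# data_rootdir, and returns the top-level <Combination> document as bytes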
""" - pyhf.set_backend('numpy', default=True) + pyhf.set_backend("numpy", default=True) yield reset_backend - pyhf.set_backend('numpy', default=True) + pyhf.set_backend("numpy", default=True) @pytest.fixture( - scope='function', + scope="function", params=[ (("numpy_backend", dict()), ("scipy_optimizer", dict())), (("jax_backend", dict()), ("scipy_optimizer", dict())), @@ -84,7 +84,7 @@ def reset_backend(): ("minuit_optimizer", dict()), ), ], - ids=['numpy', 'jax', 'numpy_minuit'], + ids=["numpy", "jax", "numpy_minuit"], ) def backend(request): # a better way to get the id? all the backends we have so far for testing @@ -95,12 +95,12 @@ def backend(request): func_name = request._pyfuncitem.name # skip backends if specified - skip_backend = request.node.get_closest_marker(f'skip_{param_id}') + skip_backend = request.node.get_closest_marker(f"skip_{param_id}") # allow the specific backend to fail if specified - fail_backend = request.node.get_closest_marker(f'fail_{param_id}') + fail_backend = request.node.get_closest_marker(f"fail_{param_id}") # only look at the specific backends only_backends = [ - pid for pid in param_ids if request.node.get_closest_marker(f'only_{pid}') + pid for pid in param_ids if request.node.get_closest_marker(f"only_{pid}") ] disable_backend = any( backend in param_id for backend in request.config.option.disable_backend @@ -148,15 +148,15 @@ def backend(request): @pytest.fixture( - scope='function', + scope="function", params=[0, 1, 2, 4], - ids=['interpcode0', 'interpcode1', 'interpcode2', 'interpcode4'], + ids=["interpcode0", "interpcode1", "interpcode2", "interpcode4"], ) def interpcode(request): yield request.param -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def datadir(tmp_path, request): """ Fixture responsible for searching a folder with the same name of test @@ -165,7 +165,7 @@ def datadir(tmp_path, request): """ # this gets the module name (e.g. /path/to/pyhf/tests/test_schema.py) # and then gets the directory by removing the suffix (e.g. 
/path/to/pyhf/tests/test_schema) - test_dir = pathlib.Path(request.module.__file__).with_suffix('') + test_dir = pathlib.Path(request.module.__file__).with_suffix("") if test_dir.is_dir(): shutil.copytree(test_dir, tmp_path, dirs_exist_ok=True) diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py index 586dbd002c..2d595804fb 100644 --- a/tests/test_backend_consistency.py +++ b/tests/test_backend_consistency.py @@ -21,8 +21,8 @@ def generate_source_static(n_bins): sig = [30.0] * n_bins source = { - 'binning': binning, - 'bindata': {'data': data, 'bkg': bkg, 'bkgerr': bkgerr, 'sig': sig}, + "binning": binning, + "bindata": {"data": data, "bkg": bkg, "bkgerr": bkgerr, "sig": sig}, } return source @@ -46,21 +46,21 @@ def generate_source_poisson(n_bins): sig = np.random.poisson(30.0, n_bins).tolist() source = { - 'binning': binning, - 'bindata': {'data': data, 'bkg': bkg, 'bkgerr': bkgerr, 'sig': sig}, + "binning": binning, + "bindata": {"data": data, "bkg": bkg, "bkgerr": bkgerr, "sig": sig}, } return source # bins = [1, 10, 50, 100, 200, 500, 800, 1000] bins = [50, 500] -bin_ids = [f'{n_bins}_bins' for n_bins in bins] +bin_ids = [f"{n_bins}_bins" for n_bins in bins] -@pytest.mark.parametrize('n_bins', bins, ids=bin_ids) -@pytest.mark.parametrize('invert_order', [False, True], ids=['normal', 'inverted']) +@pytest.mark.parametrize("n_bins", bins, ids=bin_ids) +@pytest.mark.parametrize("invert_order", [False, True], ids=["normal", "inverted"]) def test_hypotest_qmu_tilde( - n_bins, invert_order, tolerance={'numpy': 1e-02, 'tensors': 5e-03} + n_bins, invert_order, tolerance={"numpy": 1e-02, "tensors": 5e-03} ): """ Check that the different backends all compute a test statistic @@ -78,19 +78,19 @@ def test_hypotest_qmu_tilde( source = generate_source_static(n_bins) signal_sample = { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [{'name': 'mu', 'type': 'normfactor', 'data': None}], + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [{"name": "mu", "type": "normfactor", "data": None}], } background_sample = { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'uncorr_bkguncrt', - 'type': 'shapesys', - 'data': source['bindata']['bkgerr'], + "name": "uncorr_bkguncrt", + "type": "shapesys", + "data": source["bindata"]["bkgerr"], } ], } @@ -99,14 +99,14 @@ def test_hypotest_qmu_tilde( if invert_order else [signal_sample, background_sample] ) - spec = {'channels': [{'name': 'singlechannel', 'samples': samples}]} + spec = {"channels": [{"name": "singlechannel", "samples": samples}]} pdf = pyhf.Model(spec, poi_name="mu") - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata backends = [ - pyhf.tensor.numpy_backend(precision='64b'), - pyhf.tensor.jax_backend(precision='64b'), + pyhf.tensor.numpy_backend(precision="64b"), + pyhf.tensor.jax_backend(precision="64b"), ] test_statistic = [] @@ -129,7 +129,7 @@ def test_hypotest_qmu_tilde( numpy_ratio_delta_unity = np.absolute(np.subtract(numpy_ratio, 1)) try: - assert (numpy_ratio_delta_unity < tolerance['numpy']).all() + assert (numpy_ratio_delta_unity < tolerance["numpy"]).all() except AssertionError: print( f"Ratio to NumPy+SciPy exceeded tolerance of {tolerance['numpy']}: {numpy_ratio_delta_unity.tolist()}" diff --git a/tests/test_backends.py b/tests/test_backends.py index 5c205868e7..b9e4b641c1 100644 --- 
a/tests/test_backends.py +++ b/tests/test_backends.py @@ -7,18 +7,18 @@ def test_default_backend(): pyhf.set_backend("jax", default=True) - assert pyhf.default_backend.name == 'jax' - assert pyhf.tensorlib.name == 'jax' + assert pyhf.default_backend.name == "jax" + assert pyhf.tensorlib.name == "jax" def test_nondefault_backend(): pyhf.set_backend("jax", default=False) - assert pyhf.default_backend.name == 'numpy' - assert pyhf.tensorlib.name == 'jax' + assert pyhf.default_backend.name == "numpy" + assert pyhf.tensorlib.name == "jax" -@pytest.mark.parametrize('jitted', (False, True)) +@pytest.mark.parametrize("jitted", (False, True)) def test_diffable_backend(jitted): pyhf.set_backend("jax", default=True) diff --git a/tests/test_calculator.py b/tests/test_calculator.py index 2395d1ff8c..0bfe8bebed 100644 --- a/tests/test_calculator.py +++ b/tests/test_calculator.py @@ -38,24 +38,24 @@ def test_generate_asimov_can_return_fitted_pars(return_fitted_pars): # in AsymptotiCalculator.teststatistic, where the fit results should be set # the other kwargs don't impact the logic of that method, # so leave them at the default so as not to put a burden on future changes -@pytest.mark.parametrize('test_stat', ['qtilde', 'q', 'q0']) +@pytest.mark.parametrize("test_stat", ["qtilde", "q", "q0"]) def test_asymptotic_calculator_has_fitted_pars(test_stat): model = pyhf.simplemodels.uncorrelated_background([1], [1], [1]) data = [2, 1] # [main, aux] calc = pyhf.infer.calculators.AsymptoticCalculator(data, model, test_stat=test_stat) - calc.teststatistic(0 if test_stat == 'q0' else 1) + calc.teststatistic(0 if test_stat == "q0" else 1) - assert hasattr(calc, 'fitted_pars') + assert hasattr(calc, "fitted_pars") fitted_pars = calc.fitted_pars - assert hasattr(fitted_pars, 'asimov_pars') - assert hasattr(fitted_pars, 'fixed_poi_fit_to_data') - assert hasattr(fitted_pars, 'fixed_poi_fit_to_asimov') - assert hasattr(fitted_pars, 'free_fit_to_data') - assert hasattr(fitted_pars, 'free_fit_to_asimov') + assert hasattr(fitted_pars, "asimov_pars") + assert hasattr(fitted_pars, "fixed_poi_fit_to_data") + assert hasattr(fitted_pars, "fixed_poi_fit_to_asimov") + assert hasattr(fitted_pars, "free_fit_to_data") + assert hasattr(fitted_pars, "free_fit_to_asimov") rtol = 1e-5 - if test_stat == 'q0': + if test_stat == "q0": assert pytest.approx([1.0, 1.0], rel=rtol) == pyhf.tensorlib.tolist( fitted_pars.asimov_pars ) diff --git a/tests/test_cli.py b/tests/test_cli.py index 2bfd57d882..ce257e659c 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -8,15 +8,15 @@ def test_shllcomplete_cli(isolate_modules): from pyhf.cli.complete import cli runner = CliRunner() - result = runner.invoke(cli, ['bash']) - assert 'complete -F _pyhf_completion -o default pyhf' in result.output + result = runner.invoke(cli, ["bash"]) + assert "complete -F _pyhf_completion -o default pyhf" in result.output def test_shllcomplete_cli_missing_extra(isolate_modules): - sys.modules['click_completion'] = None - importlib.reload(sys.modules['pyhf.cli.complete']) + sys.modules["click_completion"] = None + importlib.reload(sys.modules["pyhf.cli.complete"]) from pyhf.cli.complete import cli runner = CliRunner() - result = runner.invoke(cli, ['bash']) - assert 'You can install it with the shellcomplete extra' in result.output + result = runner.invoke(cli, ["bash"]) + assert "You can install it with the shellcomplete extra" in result.output diff --git a/tests/test_combined_modifiers.py b/tests/test_combined_modifiers.py index 77b6d65ed2..8293dd3816 100644 --- 
a/tests/test_combined_modifiers.py +++ b/tests/test_combined_modifiers.py @@ -28,28 +28,28 @@ def __init__(self, par_map, par_order, samples, channels=None, channel_nbins=Non def suggested_init(self): init = [] for name in self.par_order: - init = init + self.par_map[name]['paramset'].suggested_init + init = init + self.par_map[name]["paramset"].suggested_init return init def suggested_bounds(self): bounds = [] for name in self.par_order: - bounds = bounds + self.par_map[name]['paramset'].suggested_bounds + bounds = bounds + self.par_map[name]["paramset"].suggested_bounds return bounds def par_slice(self, name): - return self.par_map[name]['slice'] + return self.par_map[name]["slice"] def param_set(self, name): - return self.par_map[name]['paramset'] + return self.par_map[name]["paramset"] def test_histosys(backend): mc = MockConfig( par_map={ - 'hello': { - 'paramset': constrained_by_normal( - name='hello', + "hello": { + "paramset": constrained_by_normal( + name="hello", is_scalar=True, n_parameters=1, inits=[0], @@ -57,11 +57,11 @@ def test_histosys(backend): fixed=False, auxdata=[0.0], ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'world': { - 'paramset': constrained_by_normal( - name='world', + "world": { + "paramset": constrained_by_normal( + name="world", is_scalar=True, n_parameters=1, inits=[0], @@ -69,62 +69,62 @@ def test_histosys(backend): fixed=False, auxdata=[0.0], ), - 'slice': slice(1, 2), + "slice": slice(1, 2), }, }, - par_order=['hello', 'world'], - samples=['signal', 'background'], + par_order=["hello", "world"], + samples=["signal", "background"], ) mega_mods = { - 'histosys/hello': { - 'signal': { - 'type': 'histosys', - 'name': 'hello', - 'data': { - 'hi_data': [11, 12, 13], - 'lo_data': [9, 8, 7], - 'nom_data': [10, 10, 10], - 'mask': [True, True, True], + "histosys/hello": { + "signal": { + "type": "histosys", + "name": "hello", + "data": { + "hi_data": [11, 12, 13], + "lo_data": [9, 8, 7], + "nom_data": [10, 10, 10], + "mask": [True, True, True], }, }, - 'background': { - 'type': 'histosys', - 'name': 'hello', - 'data': { - 'hi_data': [11, 12, 13], - 'lo_data': [9, 8, 7], - 'nom_data': [10, 10, 10], - 'mask': [True, True, True], + "background": { + "type": "histosys", + "name": "hello", + "data": { + "hi_data": [11, 12, 13], + "lo_data": [9, 8, 7], + "nom_data": [10, 10, 10], + "mask": [True, True, True], }, }, }, - 'histosys/world': { - 'signal': { - 'type': 'histosys', - 'name': 'world', - 'data': { - 'hi_data': [10, 10, 10], - 'lo_data': [5, 6, 7], - 'nom_data': [10, 10, 10], - 'mask': [True, True, True], + "histosys/world": { + "signal": { + "type": "histosys", + "name": "world", + "data": { + "hi_data": [10, 10, 10], + "lo_data": [5, 6, 7], + "nom_data": [10, 10, 10], + "mask": [True, True, True], }, }, - 'background': { - 'type': 'histosys', - 'name': 'world', - 'data': { - 'hi_data': [10, 10, 10], - 'lo_data': [5, 6, 7], - 'nom_data': [10, 10, 10], - 'mask': [True, True, True], + "background": { + "type": "histosys", + "name": "world", + "data": { + "hi_data": [10, 10, 10], + "lo_data": [5, 6, 7], + "nom_data": [10, 10, 10], + "mask": [True, True, True], }, }, }, } hsc = histosys_combined( - [('hello', 'histosys'), ('world', 'histosys')], mc, mega_mods + [("hello", "histosys"), ("world", "histosys")], mc, mega_mods ) mod = hsc.apply(pyhf.tensorlib.astensor([0.5, -1.0])) @@ -134,7 +134,7 @@ def test_histosys(backend): assert np.allclose(mod[0, 0, 0], [0.5, 1.0, 1.5]) hsc = histosys_combined( - [('hello', 'histosys'), ('world', 'histosys')], mc, 
mega_mods, batch_size=4 + [("hello", "histosys"), ("world", "histosys")], mc, mega_mods, batch_size=4 ) mod = hsc.apply( @@ -152,9 +152,9 @@ def test_histosys(backend): def test_normsys(backend): mc = MockConfig( par_map={ - 'hello': { - 'paramset': constrained_by_normal( - name='hello', + "hello": { + "paramset": constrained_by_normal( + name="hello", is_scalar=True, n_parameters=1, inits=[0], @@ -162,11 +162,11 @@ def test_normsys(backend): fixed=False, auxdata=[0.0], ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'world': { - 'paramset': constrained_by_normal( - name='world', + "world": { + "paramset": constrained_by_normal( + name="world", is_scalar=True, n_parameters=1, inits=[0], @@ -174,61 +174,61 @@ def test_normsys(backend): fixed=False, auxdata=[0.0], ), - 'slice': slice(1, 2), + "slice": slice(1, 2), }, }, - par_order=['hello', 'world'], - samples=['signal', 'background'], + par_order=["hello", "world"], + samples=["signal", "background"], ) mega_mods = { - 'normsys/hello': { - 'signal': { - 'type': 'normsys', - 'name': 'hello', - 'data': { - 'hi': [1.1] * 3, - 'lo': [0.9] * 3, - 'nom_data': [1, 1, 1], - 'mask': [True, True, True], + "normsys/hello": { + "signal": { + "type": "normsys", + "name": "hello", + "data": { + "hi": [1.1] * 3, + "lo": [0.9] * 3, + "nom_data": [1, 1, 1], + "mask": [True, True, True], }, }, - 'background': { - 'type': 'normsys', - 'name': 'hello', - 'data': { - 'hi': [1.2] * 3, - 'lo': [0.8] * 3, - 'nom_data': [1, 1, 1], - 'mask': [True, True, True], + "background": { + "type": "normsys", + "name": "hello", + "data": { + "hi": [1.2] * 3, + "lo": [0.8] * 3, + "nom_data": [1, 1, 1], + "mask": [True, True, True], }, }, }, - 'normsys/world': { - 'signal': { - 'type': 'v', - 'name': 'world', - 'data': { - 'hi': [1.3] * 3, - 'lo': [0.7] * 3, - 'nom_data': [1, 1, 1], - 'mask': [True, True, True], + "normsys/world": { + "signal": { + "type": "v", + "name": "world", + "data": { + "hi": [1.3] * 3, + "lo": [0.7] * 3, + "nom_data": [1, 1, 1], + "mask": [True, True, True], }, }, - 'background': { - 'type': 'normsys', - 'name': 'world', - 'data': { - 'hi': [1.4] * 3, - 'lo': [0.6] * 3, - 'nom_data': [1, 1, 1], - 'mask': [True, True, True], + "background": { + "type": "normsys", + "name": "world", + "data": { + "hi": [1.4] * 3, + "lo": [0.6] * 3, + "nom_data": [1, 1, 1], + "mask": [True, True, True], }, }, }, } - hsc = normsys_combined([('hello', 'normsys'), ('world', 'normsys')], mc, mega_mods) + hsc = normsys_combined([("hello", "normsys"), ("world", "normsys")], mc, mega_mods) mod = hsc.apply(pyhf.tensorlib.astensor([1.0, -1.0])) shape = pyhf.tensorlib.shape(mod) @@ -240,7 +240,7 @@ def test_normsys(backend): assert np.allclose(mod[1, 1, 0], [0.6, 0.6, 0.6]) hsc = normsys_combined( - [('hello', 'normsys'), ('world', 'normsys')], mc, mega_mods, batch_size=4 + [("hello", "normsys"), ("world", "normsys")], mc, mega_mods, batch_size=4 ) mod = hsc.apply( @@ -259,9 +259,9 @@ def test_normsys(backend): def test_lumi(backend): mc = MockConfig( par_map={ - 'lumi': { - 'paramset': constrained_by_normal( - name='lumi', + "lumi": { + "paramset": constrained_by_normal( + name="lumi", is_scalar=True, n_parameters=1, inits=[0], @@ -270,29 +270,29 @@ def test_lumi(backend): auxdata=[None], sigmas=[None], ), - 'slice': slice(0, 1), + "slice": slice(0, 1), } }, - par_order=['lumi'], - samples=['signal', 'background'], + par_order=["lumi"], + samples=["signal", "background"], ) mega_mods = { - 'lumi/lumi': { - 'signal': { - 'type': 'lumi', - 'name': 'lumi', - 'data': {'mask': 
[True, True, True]}, + "lumi/lumi": { + "signal": { + "type": "lumi", + "name": "lumi", + "data": {"mask": [True, True, True]}, }, - 'background': { - 'type': 'lumi', - 'name': 'lumi', - 'data': {'mask': [True, True, True]}, + "background": { + "type": "lumi", + "name": "lumi", + "data": {"mask": [True, True, True]}, }, }, } - hsc = lumi_combined([('lumi', 'lumi')], mc, mega_mods) + hsc = lumi_combined([("lumi", "lumi")], mc, mega_mods) mod = hsc.apply(pyhf.tensorlib.astensor([0.5])) shape = pyhf.tensorlib.shape(mod) @@ -302,7 +302,7 @@ def test_lumi(backend): assert np.allclose(mod[0, 0, 0], [0.5, 0.5, 0.5]) assert np.allclose(mod[0, 1, 0], [0.5, 0.5, 0.5]) - hsc = lumi_combined([('lumi', 'lumi')], mc, mega_mods, batch_size=4) + hsc = lumi_combined([("lumi", "lumi")], mc, mega_mods, batch_size=4) mod = hsc.apply(pyhf.tensorlib.astensor([[1.0], [2.0], [3.0], [4.0]])) shape = pyhf.tensorlib.shape(mod) @@ -318,9 +318,9 @@ def test_lumi(backend): def test_stat(backend): mc = MockConfig( par_map={ - 'staterror_chan1': { - 'paramset': constrained_by_normal( - name='staterror_chan1', + "staterror_chan1": { + "paramset": constrained_by_normal( + name="staterror_chan1", is_scalar=False, n_parameters=1, inits=[1], @@ -328,11 +328,11 @@ def test_stat(backend): fixed=False, auxdata=[1], ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'staterror_chan2': { - 'paramset': constrained_by_normal( - name='staterror_chan2', + "staterror_chan2": { + "paramset": constrained_by_normal( + name="staterror_chan2", is_scalar=False, n_parameters=2, inits=[1, 1], @@ -340,59 +340,59 @@ def test_stat(backend): fixed=False, auxdata=[1, 1], ), - 'slice': slice(1, 3), + "slice": slice(1, 3), }, }, - channels=['chan1', 'chan2'], - channel_nbins={'chan1': 1, 'chan2': 2}, - par_order=['staterror_chan1', 'staterror_chan2'], - samples=['signal', 'background'], + channels=["chan1", "chan2"], + channel_nbins={"chan1": 1, "chan2": 2}, + par_order=["staterror_chan1", "staterror_chan2"], + samples=["signal", "background"], ) mega_mods = { - 'staterror/staterror_chan1': { - 'signal': { - 'type': 'staterror', - 'name': 'staterror_chan1', - 'data': { - 'mask': [True, False, False], - 'nom_data': [10, 10, 10], - 'uncrt': [1, 0, 0], + "staterror/staterror_chan1": { + "signal": { + "type": "staterror", + "name": "staterror_chan1", + "data": { + "mask": [True, False, False], + "nom_data": [10, 10, 10], + "uncrt": [1, 0, 0], }, }, - 'background': { - 'type': 'staterror', - 'name': 'staterror_chan1', - 'data': { - 'mask': [True, False, False], - 'nom_data': [10, 10, 10], - 'uncrt': [1, 0, 0], + "background": { + "type": "staterror", + "name": "staterror_chan1", + "data": { + "mask": [True, False, False], + "nom_data": [10, 10, 10], + "uncrt": [1, 0, 0], }, }, }, - 'staterror/staterror_chan2': { - 'signal': { - 'type': 'staterror', - 'name': 'staterror_chan2', - 'data': { - 'mask': [False, True, True], - 'nom_data': [10, 10, 10], - 'uncrt': [0, 1, 1], + "staterror/staterror_chan2": { + "signal": { + "type": "staterror", + "name": "staterror_chan2", + "data": { + "mask": [False, True, True], + "nom_data": [10, 10, 10], + "uncrt": [0, 1, 1], }, }, - 'background': { - 'type': 'staterror', - 'name': 'staterror_chan2', - 'data': { - 'mask': [False, True, True], - 'nom_data': [10, 10, 10], - 'uncrt': [0, 1, 1], + "background": { + "type": "staterror", + "name": "staterror_chan2", + "data": { + "mask": [False, True, True], + "nom_data": [10, 10, 10], + "uncrt": [0, 1, 1], }, }, }, } hsc = staterror_combined( - [('staterror_chan1', 
'staterror'), ('staterror_chan2', 'staterror')], + [("staterror_chan1", "staterror"), ("staterror_chan2", "staterror")], mc, mega_mods, ) @@ -409,20 +409,20 @@ def test_stat(backend): def test_shapesys(backend): mc = MockConfig( par_map={ - 'dummy1': { - 'paramset': paramset( - name='dummy1', + "dummy1": { + "paramset": paramset( + name="dummy1", is_scalar=True, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'shapesys1': { - 'paramset': constrained_by_poisson( - name='shapesys1', + "shapesys1": { + "paramset": constrained_by_poisson( + name="shapesys1", is_scalar=False, n_parameters=1, inits=[0], @@ -431,11 +431,11 @@ def test_shapesys(backend): auxdata=[None], factors=[None], ), - 'slice': slice(1, 2), + "slice": slice(1, 2), }, - 'shapesys2': { - 'paramset': constrained_by_poisson( - name='shapesys2', + "shapesys2": { + "paramset": constrained_by_poisson( + name="shapesys2", n_parameters=2, is_scalar=False, inits=[0, 0], @@ -444,70 +444,70 @@ def test_shapesys(backend): auxdata=[None, None], factors=[None, None], ), - 'slice': slice(2, 4), + "slice": slice(2, 4), }, - 'dummy2': { - 'paramset': paramset( - name='dummy2', + "dummy2": { + "paramset": paramset( + name="dummy2", is_scalar=True, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(4, 5), + "slice": slice(4, 5), }, }, - channels=['chan1', 'chan2'], - channel_nbins={'chan1': 1, 'chan2': 2}, - par_order=['dummy1', 'shapesys1', 'shapesys2', 'dummy2'], - samples=['signal', 'background'], + channels=["chan1", "chan2"], + channel_nbins={"chan1": 1, "chan2": 2}, + par_order=["dummy1", "shapesys1", "shapesys2", "dummy2"], + samples=["signal", "background"], ) mega_mods = { - 'shapesys/shapesys1': { - 'signal': { - 'type': 'shapesys', - 'name': 'shapesys1', - 'data': { - 'mask': [True, False, False], - 'nom_data': [10, 10, 10], - 'uncrt': [1, 0, 0], + "shapesys/shapesys1": { + "signal": { + "type": "shapesys", + "name": "shapesys1", + "data": { + "mask": [True, False, False], + "nom_data": [10, 10, 10], + "uncrt": [1, 0, 0], }, }, - 'background': { - 'type': 'shapesys', - 'name': 'shapesys1', - 'data': { - 'mask': [True, False, False], - 'nom_data': [10, 10, 10], - 'uncrt': [1, 0, 0], + "background": { + "type": "shapesys", + "name": "shapesys1", + "data": { + "mask": [True, False, False], + "nom_data": [10, 10, 10], + "uncrt": [1, 0, 0], }, }, }, - 'shapesys/shapesys2': { - 'signal': { - 'type': 'shapesys', - 'name': 'shapesys1', - 'data': { - 'mask': [False, True, True], - 'nom_data': [10, 10, 10], - 'uncrt': [0, 1, 1], + "shapesys/shapesys2": { + "signal": { + "type": "shapesys", + "name": "shapesys1", + "data": { + "mask": [False, True, True], + "nom_data": [10, 10, 10], + "uncrt": [0, 1, 1], }, }, - 'background': { - 'type': 'shapesys', - 'name': 'shapesys1', - 'data': { - 'mask': [False, True, True], - 'nom_data': [10, 10, 10], - 'uncrt': [0, 1, 1], + "background": { + "type": "shapesys", + "name": "shapesys1", + "data": { + "mask": [False, True, True], + "nom_data": [10, 10, 10], + "uncrt": [0, 1, 1], }, }, }, } hsc = shapesys_combined( - [('shapesys1', 'shapesys'), ('shapesys2', 'shapesys')], mc, mega_mods + [("shapesys1", "shapesys"), ("shapesys2", "shapesys")], mc, mega_mods ) mod = hsc.apply(pyhf.tensorlib.astensor([-10, 1.1, 1.2, 1.3, -20])) @@ -522,61 +522,61 @@ def test_shapesys(backend): def test_normfactor(backend): mc = MockConfig( par_map={ - 'mu1': { - 'paramset': unconstrained( - name='mu1', + "mu1": { + "paramset": 
unconstrained( + name="mu1", is_scalar=True, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'mu2': { - 'paramset': unconstrained( - name='mu2', + "mu2": { + "paramset": unconstrained( + name="mu2", is_scalar=True, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(1, 2), + "slice": slice(1, 2), }, }, - par_order=['mu1', 'mu2'], - samples=['signal', 'background'], + par_order=["mu1", "mu2"], + samples=["signal", "background"], ) mega_mods = { - 'normfactor/mu1': { - 'signal': { - 'type': 'normfactor', - 'name': 'mu1', - 'data': {'mask': [True, False, False]}, + "normfactor/mu1": { + "signal": { + "type": "normfactor", + "name": "mu1", + "data": {"mask": [True, False, False]}, }, - 'background': { - 'type': 'normfactor', - 'name': 'mu1', - 'data': {'mask': [True, False, False]}, + "background": { + "type": "normfactor", + "name": "mu1", + "data": {"mask": [True, False, False]}, }, }, - 'normfactor/mu2': { - 'signal': { - 'type': 'normfactor', - 'name': 'mu2', - 'data': {'mask': [False, True, True]}, + "normfactor/mu2": { + "signal": { + "type": "normfactor", + "name": "mu2", + "data": {"mask": [False, True, True]}, }, - 'background': { - 'type': 'normfactor', - 'name': 'mu2', - 'data': {'mask': [False, True, True]}, + "background": { + "type": "normfactor", + "name": "mu2", + "data": {"mask": [False, True, True]}, }, }, } hsc = normfactor_combined( - [('mu1', 'normfactor'), ('mu2', 'normfactor')], mc, mega_mods + [("mu1", "normfactor"), ("mu2", "normfactor")], mc, mega_mods ) mod = hsc.apply(pyhf.tensorlib.astensor([2.0, 3.0])) @@ -588,7 +588,7 @@ def test_normfactor(backend): assert np.allclose(mod[1, 0, 0], [1.0, 3.0, 3.0]) hsc = normfactor_combined( - [('mu1', 'normfactor'), ('mu2', 'normfactor')], mc, mega_mods, batch_size=4 + [("mu1", "normfactor"), ("mu2", "normfactor")], mc, mega_mods, batch_size=4 ) mod = hsc.apply( @@ -612,20 +612,20 @@ def test_normfactor(backend): def test_shapesys_zero(backend): mc = MockConfig( par_map={ - 'SigXsecOverSM': { - 'paramset': paramset( - name='SigXsecOverSM', + "SigXsecOverSM": { + "paramset": paramset( + name="SigXsecOverSM", is_scalar=True, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'syst': { - 'paramset': constrained_by_poisson( - name='syst', + "syst": { + "paramset": constrained_by_poisson( + name="syst", is_scalar=False, n_parameters=5, inits=[0] * 5, @@ -634,38 +634,38 @@ def test_shapesys_zero(backend): auxdata=[None] * 5, factors=[None] * 5, ), - 'slice': slice(1, 6), + "slice": slice(1, 6), }, }, - channels=['channel1'], - channel_nbins={'channel1': 6}, - par_order=['SigXsecOverSM', 'syst'], - samples=['signal', 'background'], + channels=["channel1"], + channel_nbins={"channel1": 6}, + par_order=["SigXsecOverSM", "syst"], + samples=["signal", "background"], ) mega_mods = { - 'shapesys/syst': { - 'background': { - 'type': 'shapesys', - 'name': 'syst', - 'data': { - 'mask': [True, True, False, True, True, True], - 'nom_data': [100.0, 90.0, 0.0, 70, 0.1, 50], - 'uncrt': [10, 9, 1, 0.0, 0.1, 5], + "shapesys/syst": { + "background": { + "type": "shapesys", + "name": "syst", + "data": { + "mask": [True, True, False, True, True, True], + "nom_data": [100.0, 90.0, 0.0, 70, 0.1, 50], + "uncrt": [10, 9, 1, 0.0, 0.1, 5], }, }, - 'signal': { - 'type': 'shapesys', - 'name': 'syst', - 'data': { - 'mask': [False, False, False, False, False, False], - 'nom_data': [20.0, 10.0, 5.0, 3.0, 2.0, 
1.0], - 'uncrt': [10, 9, 1, 0.0, 0.1, 5], + "signal": { + "type": "shapesys", + "name": "syst", + "data": { + "mask": [False, False, False, False, False, False], + "nom_data": [20.0, 10.0, 5.0, 3.0, 2.0, 1.0], + "uncrt": [10, 9, 1, 0.0, 0.1, 5], }, }, } } - hsc = shapesys_combined([('syst', 'shapesys')], mc, mega_mods) + hsc = shapesys_combined([("syst", "shapesys")], mc, mega_mods) mod = hsc.apply(pyhf.tensorlib.astensor([-10, 1.1, 1.2, 1.3, -20, -30])) shape = pyhf.tensorlib.shape(mod) @@ -678,64 +678,64 @@ def test_shapesys_zero(backend): def test_shapefactor(backend): mc = MockConfig( par_map={ - 'shapefac1': { - 'paramset': unconstrained( - name='shapefac1', + "shapefac1": { + "paramset": unconstrained( + name="shapefac1", is_scalar=False, n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False, ), - 'slice': slice(0, 1), + "slice": slice(0, 1), }, - 'shapefac2': { - 'paramset': unconstrained( - name='shapefac2', + "shapefac2": { + "paramset": unconstrained( + name="shapefac2", is_scalar=False, n_parameters=2, inits=[0, 0], bounds=[[0, 10], [0, 10]], fixed=False, ), - 'slice': slice(1, 3), + "slice": slice(1, 3), }, }, - par_order=['shapefac1', 'shapefac2'], - samples=['signal', 'background'], - channels=['chan_one', 'chan_two'], - channel_nbins={'chan_one': 1, 'chan_two': 2}, + par_order=["shapefac1", "shapefac2"], + samples=["signal", "background"], + channels=["chan_one", "chan_two"], + channel_nbins={"chan_one": 1, "chan_two": 2}, ) mega_mods = { - 'shapefactor/shapefac1': { - 'signal': { - 'type': 'shapefactor', - 'name': 'shapefac1', - 'data': {'mask': [True, False, False]}, + "shapefactor/shapefac1": { + "signal": { + "type": "shapefactor", + "name": "shapefac1", + "data": {"mask": [True, False, False]}, }, - 'background': { - 'type': 'shapefactor', - 'name': 'shapefac1', - 'data': {'mask': [True, False, False]}, + "background": { + "type": "shapefactor", + "name": "shapefac1", + "data": {"mask": [True, False, False]}, }, }, - 'shapefactor/shapefac2': { - 'signal': { - 'type': 'shapefactor', - 'name': 'shapefac2', - 'data': {'mask': [False, True, True]}, + "shapefactor/shapefac2": { + "signal": { + "type": "shapefactor", + "name": "shapefac2", + "data": {"mask": [False, True, True]}, }, - 'background': { - 'type': 'normfactor', - 'name': 'shapefac2', - 'data': {'mask': [False, True, True]}, + "background": { + "type": "normfactor", + "name": "shapefac2", + "data": {"mask": [False, True, True]}, }, }, } hsc = shapefactor_combined( - [('shapefac1', 'shapefactor'), ('shapefac2', 'shapefactor')], mc, mega_mods + [("shapefac1", "shapefactor"), ("shapefac2", "shapefactor")], mc, mega_mods ) mod = hsc.apply(pyhf.tensorlib.astensor([2.0, 3.0, 4.0])) @@ -747,7 +747,7 @@ def test_shapefactor(backend): assert np.allclose(mod[1, 0, 0], [1.0, 3.0, 4.0]) hsc = shapefactor_combined( - [('shapefac1', 'shapefactor'), ('shapefac2', 'shapefactor')], + [("shapefac1", "shapefactor"), ("shapefac2", "shapefactor")], mc, mega_mods, batch_size=4, diff --git a/tests/test_compat.py b/tests/test_compat.py index 7eb4bcc22d..a0039e3a6f 100644 --- a/tests/test_compat.py +++ b/tests/test_compat.py @@ -6,35 +6,35 @@ def test_interpretation(): - interp = pyhf.compat.interpret_rootname('gamma_foo_0') - assert interp['constrained'] == 'n/a' - assert not interp['is_scalar'] - assert interp['name'] == 'foo' - assert interp['element'] == 0 - - interp = pyhf.compat.interpret_rootname('alpha_foo') - assert interp['constrained'] - assert interp['is_scalar'] - assert interp['name'] == 'foo' - assert interp['element'] 
== 'n/a' - - interp = pyhf.compat.interpret_rootname('mu') - assert not interp['constrained'] - assert interp['is_scalar'] - assert interp['name'] == 'mu' - assert interp['element'] == 'n/a' - - interp = pyhf.compat.interpret_rootname('Lumi') - assert interp['name'] == 'lumi' - - interp = pyhf.compat.interpret_rootname('Lumi') - assert interp['name'] == 'lumi' + interp = pyhf.compat.interpret_rootname("gamma_foo_0") + assert interp["constrained"] == "n/a" + assert not interp["is_scalar"] + assert interp["name"] == "foo" + assert interp["element"] == 0 + + interp = pyhf.compat.interpret_rootname("alpha_foo") + assert interp["constrained"] + assert interp["is_scalar"] + assert interp["name"] == "foo" + assert interp["element"] == "n/a" + + interp = pyhf.compat.interpret_rootname("mu") + assert not interp["constrained"] + assert interp["is_scalar"] + assert interp["name"] == "mu" + assert interp["element"] == "n/a" + + interp = pyhf.compat.interpret_rootname("Lumi") + assert interp["name"] == "lumi" + + interp = pyhf.compat.interpret_rootname("Lumi") + assert interp["name"] == "lumi" with pytest.raises(ValueError): - pyhf.compat.interpret_rootname('gamma_foo') + pyhf.compat.interpret_rootname("gamma_foo") with pytest.raises(ValueError): - pyhf.compat.interpret_rootname('alpha_') + pyhf.compat.interpret_rootname("alpha_") def test_torootname(): @@ -42,35 +42,35 @@ def test_torootname(): model_2 = pyhf.simplemodels.uncorrelated_background([5], [50], [7]) model_3 = pyhf.simplemodels.uncorrelated_background([5, 6], [50, 50], [7, 8]) - assert pyhf.compat.paramset_to_rootnames(model_1.config.param_set('mu')) == 'mu' + assert pyhf.compat.paramset_to_rootnames(model_1.config.param_set("mu")) == "mu" assert ( pyhf.compat.paramset_to_rootnames( - model_1.config.param_set('correlated_bkg_uncertainty') + model_1.config.param_set("correlated_bkg_uncertainty") ) - == 'alpha_correlated_bkg_uncertainty' + == "alpha_correlated_bkg_uncertainty" ) assert pyhf.compat.paramset_to_rootnames( - model_2.config.param_set('uncorr_bkguncrt') - ) == ['gamma_uncorr_bkguncrt_0'] + model_2.config.param_set("uncorr_bkguncrt") + ) == ["gamma_uncorr_bkguncrt_0"] assert pyhf.compat.paramset_to_rootnames( - model_3.config.param_set('uncorr_bkguncrt') - ) == ['gamma_uncorr_bkguncrt_0', 'gamma_uncorr_bkguncrt_1'] + model_3.config.param_set("uncorr_bkguncrt") + ) == ["gamma_uncorr_bkguncrt_0", "gamma_uncorr_bkguncrt_1"] def test_fromxml(): parsed_xml = pyhf.readxml.parse( - 'validation/xmlimport_input3/config/examples/example_ShapeSys.xml', - 'validation/xmlimport_input3', + "validation/xmlimport_input3/config/examples/example_ShapeSys.xml", + "validation/xmlimport_input3", ) # build the spec, strictly checks properties included spec = { - 'channels': parsed_xml['channels'], - 'parameters': parsed_xml['measurements'][0]['config']['parameters'], + "channels": parsed_xml["channels"], + "parameters": parsed_xml["measurements"][0]["config"]["parameters"], } - model = pyhf.Model(spec, poi_name='SigXsecOverSM') + model = pyhf.Model(spec, poi_name="SigXsecOverSM") - assert pyhf.compat.paramset_to_rootnames(model.config.param_set('lumi')) == 'Lumi' + assert pyhf.compat.paramset_to_rootnames(model.config.param_set("lumi")) == "Lumi" diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 676ff7ada8..efd82f3873 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -15,63 +15,63 @@ def __init__(self, par_map, par_order): self.auxdata = [] self.auxdata_order = [] for name in self.par_order: - self.auxdata 
= self.auxdata + self.par_map[name]['paramset'].auxdata + self.auxdata = self.auxdata + self.par_map[name]["paramset"].auxdata self.auxdata_order.append(name) self.npars = len(self.suggested_init()) def suggested_init(self): init = [] for name in self.par_order: - init = init + self.par_map[name]['paramset'].suggested_init + init = init + self.par_map[name]["paramset"].suggested_init return init def par_slice(self, name): - return self.par_map[name]['slice'] + return self.par_map[name]["slice"] def param_set(self, name): - return self.par_map[name]['paramset'] + return self.par_map[name]["paramset"] def test_numpy_pdf_inputs(backend): spec = { - 'channels': [ + "channels": [ { - 'name': 'firstchannel', - 'samples': [ + "name": "firstchannel", + "samples": [ { - 'name': 'mu', - 'data': [10.0, 10.0], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "mu", + "data": [10.0, 10.0], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'bkg1', - 'data': [50.0, 70.0], - 'modifiers': [ + "name": "bkg1", + "data": [50.0, 70.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [12.0, 12.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [12.0, 12.0], } ], }, { - 'name': 'bkg2', - 'data': [30.0, 20.0], - 'modifiers': [ + "name": "bkg2", + "data": [30.0, 20.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [5.0, 5.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [5.0, 5.0], } ], }, { - 'name': 'bkg3', - 'data': [20.0, 15.0], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapesys', 'data': [10, 10]} + "name": "bkg3", + "data": [20.0, 15.0], + "modifiers": [ + {"name": "bkg_norm", "type": "shapesys", "data": [10, 10]} ], }, ], @@ -94,18 +94,18 @@ def slow(self, auxdata, pars): end_index = start_index + parset.n_parameters thisauxdata = auxdata[start_index:end_index] start_index = end_index - if parset.pdf_type == 'normal': + if parset.pdf_type == "normal": paralphas = pars[parslice] sigmas = ( parset.sigmas - if hasattr(parset, 'sigmas') + if hasattr(parset, "sigmas") else tensorlib.ones(paralphas.shape) ) sigmas = tensorlib.astensor(sigmas) constraint_term = tensorlib.normal_logpdf( thisauxdata, paralphas, sigmas ) - elif parset.pdf_type == 'poisson': + elif parset.pdf_type == "poisson": paralphas = tensorlib.product( tensorlib.stack( [pars[parslice], tensorlib.astensor(parset.factors)] @@ -133,11 +133,11 @@ def fast(self, auxdata, pars): def test_batched_constraints(backend): config = MockConfig( - par_order=['pois1', 'pois2', 'norm1', 'norm2'], + par_order=["pois1", "pois2", "norm1", "norm2"], par_map={ - 'pois1': { - 'paramset': constrained_by_poisson( - name='pois1', + "pois1": { + "paramset": constrained_by_poisson( + name="pois1", is_scalar=False, n_parameters=1, inits=[1.0], @@ -146,12 +146,12 @@ def test_batched_constraints(backend): factors=[12], fixed=False, ), - 'slice': slice(0, 1), - 'auxdata': [1], + "slice": slice(0, 1), + "auxdata": [1], }, - 'pois2': { - 'paramset': constrained_by_poisson( - name='pois2', + "pois2": { + "paramset": constrained_by_poisson( + name="pois2", is_scalar=False, n_parameters=2, inits=[1.0] * 2, @@ -160,11 +160,11 @@ def test_batched_constraints(backend): factors=[13, 14], fixed=False, ), - 'slice': slice(1, 3), + "slice": slice(1, 3), }, - 'norm1': { - 'paramset': constrained_by_normal( - name='norm1', + "norm1": { + "paramset": constrained_by_normal( + name="norm1", is_scalar=False, 
n_parameters=2, inits=[0] * 2, @@ -173,11 +173,11 @@ def test_batched_constraints(backend): sigmas=[1.5, 2.0], fixed=False, ), - 'slice': slice(3, 5), + "slice": slice(3, 5), }, - 'norm2': { - 'paramset': constrained_by_normal( - name='norm2', + "norm2": { + "paramset": constrained_by_normal( + name="norm2", is_scalar=False, n_parameters=3, inits=[0] * 3, @@ -185,7 +185,7 @@ def test_batched_constraints(backend): auxdata=[0, 0, 0], fixed=False, ), - 'slice': slice(5, 8), + "slice": slice(5, 8), }, }, ) diff --git a/tests/test_custom_mods.py b/tests/test_custom_mods.py index a4cc09e16c..8b2fd26d22 100644 --- a/tests/test_custom_mods.py +++ b/tests/test_custom_mods.py @@ -10,14 +10,14 @@ class custom_builder: def __init__(self, pdfconfig): self.config = pdfconfig self.required_parsets = { - 'k1': [ + "k1": [ { - 'paramset_type': 'unconstrained', - 'n_parameters': 1, - 'is_constrained': False, - 'inits': (1.0,), - 'bounds': ((-5, 5),), - 'fixed': False, + "paramset_type": "unconstrained", + "n_parameters": 1, + "is_constrained": False, + "inits": (1.0,), + "bounds": ((-5, 5),), + "fixed": False, } ] } @@ -31,8 +31,8 @@ def finalize(self): class custom_applicator: - op_code = 'multiplication' - name = 'customfunc' + op_code = "multiplication" + name = "customfunc" def __init__( self, modifiers=None, pdfconfig=None, builder_data=None, batch_size=None @@ -49,32 +49,32 @@ def test_custom_mods(): model = pyhf.Model( { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': [10] * 20, - 'modifiers': [ + "name": "signal", + "data": [10] * 20, + "modifiers": [ { - 'name': 'singlemod', - 'type': 'customfunc', - 'data': None, + "name": "singlemod", + "type": "customfunc", + "data": None, }, ], }, - {'name': 'background', 'data': [300] * 20, 'modifiers': []}, + {"name": "background", "data": [300] * 20, "modifiers": []}, ], } ] }, modifier_set=modifier_set, - poi_name='k1', + poi_name="k1", validate=False, ) assert model - assert 'k1' in model.config.parameters + assert "k1" in model.config.parameters def test_missing_poi(): @@ -84,28 +84,28 @@ def test_missing_poi(): with pytest.raises(exceptions.InvalidModel): model = pyhf.Model( { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': [10] * 20, - 'modifiers': [ + "name": "signal", + "data": [10] * 20, + "modifiers": [ { - 'name': 'singlemod', - 'type': 'customfunc', - 'data': None, + "name": "singlemod", + "type": "customfunc", + "data": None, }, ], }, - {'name': 'background', 'data': [300] * 20, 'modifiers': []}, + {"name": "background", "data": [300] * 20, "modifiers": []}, ], } ] }, modifier_set=modifier_set, - poi_name='non_existent_poi', + poi_name="non_existent_poi", validate=False, ) assert model diff --git a/tests/test_events.py b/tests/test_events.py index cd70da9d6a..691d4762a5 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -4,7 +4,7 @@ def test_subscribe_event(): - ename = 'test' + ename = "test" m = mock.Mock() events.subscribe(ename)(m.__call__) @@ -17,7 +17,7 @@ def test_subscribe_event(): def test_event(): - ename = 'test' + ename = "test" m = mock.Mock() events.subscribe(ename)(m.__call__) @@ -28,7 +28,7 @@ def test_event(): def test_event_weakref(): - ename = 'test' + ename = "test" m = mock.Mock() events.subscribe(ename)(m.__call__) @@ -40,7 +40,7 @@ def test_event_weakref(): def test_disable_event(): - ename = 'test' + ename = 
"test" m = mock.Mock() noop, noop_m = events.noop, mock.Mock() @@ -64,16 +64,16 @@ def test_trigger_noevent(): noop, noop_m = events.noop, mock.Mock() events.noop = noop_m - assert 'fake' not in events.__events - assert events.trigger('fake') == events.noop - events.trigger('fake')() + assert "fake" not in events.__events + assert events.trigger("fake") == events.noop + events.trigger("fake")() noop_m.assert_called_once() events.noop = noop def test_subscribe_function(capsys): - ename = 'test' + ename = "test" def add(a, b): print(a + b) @@ -88,7 +88,7 @@ def add(a, b): def test_trigger_function(capsys): - ename = 'test' + ename = "test" def add(a, b): print(a + b) @@ -97,8 +97,8 @@ def add(a, b): postcall = mock.Mock() wrapped_add = events.register(ename)(add) - events.subscribe(f'{ename}::before')(precall.__call__) - events.subscribe(f'{ename}::after')(postcall.__call__) + events.subscribe(f"{ename}::before")(precall.__call__) + events.subscribe(f"{ename}::after")(postcall.__call__) precall.assert_not_called() postcall.assert_not_called() @@ -109,5 +109,5 @@ def add(a, b): precall.assert_called_once() postcall.assert_called_once() - del events.__events[f'{ename}::before'] - del events.__events[f'{ename}::after'] + del events.__events[f"{ename}::before"] + del events.__events[f"{ename}::after"] diff --git a/tests/test_export.py b/tests/test_export.py index 5c1ebed6e2..c7cb5bbad4 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -11,40 +11,40 @@ def spec_staterror(): spec = { - 'channels': [ + "channels": [ { - 'name': 'firstchannel', - 'samples': [ + "name": "firstchannel", + "samples": [ { - 'name': 'mu', - 'data': [10.0, 10.0], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "mu", + "data": [10.0, 10.0], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'bkg1', - 'data': [50.0, 70.0], - 'modifiers': [ + "name": "bkg1", + "data": [50.0, 70.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [12.0, 12.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [12.0, 12.0], } ], }, { - 'name': 'bkg2', - 'data': [30.0, 20.0], - 'modifiers': [ + "name": "bkg2", + "data": [30.0, 20.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [5.0, 5.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [5.0, 5.0], } ], }, - {'name': 'bkg3', 'data': [20.0, 15.0], 'modifiers': []}, + {"name": "bkg3", "data": [20.0, 15.0], "modifiers": []}, ], } ] @@ -58,27 +58,27 @@ def spec_histosys(): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'bkg_norm', - 'type': 'histosys', - 'data': { - 'lo_data': source['bindata']['bkgsys_dn'], - 'hi_data': source['bindata']['bkgsys_up'], + "name": "bkg_norm", + "type": "histosys", + "data": { + "lo_data": source["bindata"]["bkgsys_dn"], + "hi_data": source["bindata"]["bkgsys_up"], }, } ], @@ -96,25 +96,25 @@ def spec_normsys(): ) as spec_file: source = 
json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'bkg_norm', - 'type': 'normsys', - 'data': {'lo': 0.9, 'hi': 1.1}, + "name": "bkg_norm", + "type": "normsys", + "data": {"lo": 0.9, "hi": 1.1}, } ], }, @@ -131,22 +131,22 @@ def spec_shapesys(): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapesys', 'data': [10, 10]} + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ + {"name": "bkg_norm", "type": "shapesys", "data": [10, 10]} ], }, ], @@ -162,22 +162,22 @@ def spec_shapefactor(): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapefactor', 'data': None} + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ + {"name": "bkg_norm", "type": "shapefactor", "data": None} ], }, ], @@ -208,59 +208,59 @@ def test_export_measurement(): "name": "NormalMeasurement", } modifiertypes = { - 'mu': 'normfactor', - 'lumi': 'lumi', - 'syst1': 'normsys', - 'syst2': 'histosys', - 'syst3': 'shapesys', - 'syst4': 'staterror', + "mu": "normfactor", + "lumi": "lumi", + "syst1": "normsys", + "syst2": "histosys", + "syst3": "shapesys", + "syst4": "staterror", } m = pyhf.writexml.build_measurement(measurementspec, modifiertypes) assert m is not None - assert m.attrib['Name'] == measurementspec['name'] - assert m.attrib['Lumi'] == str( - measurementspec['config']['parameters'][0]['auxdata'][0] + assert m.attrib["Name"] == measurementspec["name"] + assert m.attrib["Lumi"] == str( + measurementspec["config"]["parameters"][0]["auxdata"][0] ) - assert m.attrib['LumiRelErr'] == str( - measurementspec['config']['parameters'][0]['sigmas'][0] + assert m.attrib["LumiRelErr"] == str( + measurementspec["config"]["parameters"][0]["sigmas"][0] ) - poi = m.find('POI') + poi = m.find("POI") assert poi is not None - assert poi.text == measurementspec['config']['poi'] - paramsetting = m.find('ParamSetting') + assert poi.text == measurementspec["config"]["poi"] + paramsetting = m.find("ParamSetting") assert paramsetting is not None - assert 'alpha_syst1' in paramsetting.text - assert 
'alpha_syst2' in paramsetting.text - assert 'gamma_syst3' in paramsetting.text - assert 'gamma_syst4' in paramsetting.text + assert "alpha_syst1" in paramsetting.text + assert "alpha_syst2" in paramsetting.text + assert "gamma_syst3" in paramsetting.text + assert "gamma_syst4" in paramsetting.text @pytest.mark.parametrize( "spec, has_root_data, attrs, modtype", [ - (spec_staterror(), True, ['Activate', 'HistoName'], 'staterror'), - (spec_histosys(), True, ['HistoNameHigh', 'HistoNameLow'], 'histosys'), - (spec_normsys(), False, ['High', 'Low'], 'normsys'), - (spec_shapesys(), True, ['ConstraintType', 'HistoName'], 'shapesys'), - (spec_shapefactor(), False, [], 'shapefactor'), + (spec_staterror(), True, ["Activate", "HistoName"], "staterror"), + (spec_histosys(), True, ["HistoNameHigh", "HistoNameLow"], "histosys"), + (spec_normsys(), False, ["High", "Low"], "normsys"), + (spec_shapesys(), True, ["ConstraintType", "HistoName"], "shapesys"), + (spec_shapefactor(), False, [], "shapefactor"), ], - ids=['staterror', 'histosys', 'normsys', 'shapesys', 'shapefactor'], + ids=["staterror", "histosys", "normsys", "shapesys", "shapefactor"], ) def test_export_modifier(mocker, caplog, spec, has_root_data, attrs, modtype): - channelspec = spec['channels'][0] - channelname = channelspec['name'] - samplespec = channelspec['samples'][1] - samplename = samplespec['name'] - sampledata = samplespec['data'] - modifierspec = samplespec['modifiers'][0] + channelspec = spec["channels"][0] + channelname = channelspec["name"] + samplespec = channelspec["samples"][1] + samplename = samplespec["name"] + sampledata = samplespec["data"] + modifierspec = samplespec["modifiers"][0] - assert modifierspec['type'] == modtype + assert modifierspec["type"] == modtype - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") - with caplog.at_level(logging.DEBUG, 'pyhf.writexml'): + with caplog.at_level(logging.DEBUG, "pyhf.writexml"): modifier = pyhf.writexml.build_modifier( - {'measurements': [{'config': {'parameters': []}}]}, + {"measurements": [{"config": {"parameters": []}}]}, modifierspec, channelname, samplename, @@ -269,21 +269,21 @@ def test_export_modifier(mocker, caplog, spec, has_root_data, attrs, modtype): assert "Skipping modifier" not in caplog.text # if the modifier is a staterror, it has no Name - if modtype == 'staterror': - assert 'Name' not in modifier.attrib + if modtype == "staterror": + assert "Name" not in modifier.attrib else: - assert modifier.attrib['Name'] == modifierspec['name'] + assert modifier.attrib["Name"] == modifierspec["name"] assert all(attr in modifier.attrib for attr in attrs) assert pyhf.writexml._ROOT_DATA_FILE.__setitem__.called == has_root_data def test_export_bad_modifier(caplog): - with caplog.at_level(logging.DEBUG, 'pyhf.writexml'): + with caplog.at_level(logging.DEBUG, "pyhf.writexml"): pyhf.writexml.build_modifier( - {'measurements': [{'config': {'parameters': []}}]}, - {'name': 'fakeModifier', 'type': 'unknown-modifier'}, - 'fakeChannel', - 'fakeSample', + {"measurements": [{"config": {"parameters": []}}]}, + {"name": "fakeModifier", "type": "unknown-modifier"}, + "fakeChannel", + "fakeSample", None, ) assert "Skipping modifier fakeModifier(unknown-modifier)" in caplog.text @@ -292,28 +292,28 @@ def test_export_bad_modifier(caplog): @pytest.mark.parametrize( "spec, normfactor_config", [ - (spec_staterror(), dict(name='mu', inits=[1.0], bounds=[[0.0, 8.0]])), + (spec_staterror(), dict(name="mu", inits=[1.0], bounds=[[0.0, 8.0]])), 
(spec_histosys(), dict()), - (spec_normsys(), dict(name='mu', inits=[2.0], bounds=[[0.0, 10.0]])), - (spec_shapesys(), dict(name='mu', inits=[1.0], bounds=[[5.0, 10.0]])), + (spec_normsys(), dict(name="mu", inits=[2.0], bounds=[[0.0, 10.0]])), + (spec_shapesys(), dict(name="mu", inits=[1.0], bounds=[[5.0, 10.0]])), ], - ids=['upper-bound', 'empty-config', 'init', 'lower-bound'], + ids=["upper-bound", "empty-config", "init", "lower-bound"], ) def test_export_modifier_normfactor(mocker, spec, normfactor_config): - channelspec = spec['channels'][0] - channelname = channelspec['name'] - samplespec = channelspec['samples'][0] - samplename = samplespec['name'] - sampledata = samplespec['data'] - modifierspec = samplespec['modifiers'][0] - - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') + channelspec = spec["channels"][0] + channelname = channelspec["name"] + samplespec = channelspec["samples"][0] + samplename = samplespec["name"] + sampledata = samplespec["data"] + modifierspec = samplespec["modifiers"][0] + + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") modifier = pyhf.writexml.build_modifier( { - 'measurements': [ + "measurements": [ { - 'config': { - 'parameters': [normfactor_config] if normfactor_config else [] + "config": { + "parameters": [normfactor_config] if normfactor_config else [] } } ] @@ -324,53 +324,53 @@ def test_export_modifier_normfactor(mocker, spec, normfactor_config): sampledata, ) - assert all(attr in modifier.attrib for attr in ['Name', 'Val', 'High', 'Low']) - assert float(modifier.attrib['Val']) == normfactor_config.get('inits', [1.0])[0] + assert all(attr in modifier.attrib for attr in ["Name", "Val", "High", "Low"]) + assert float(modifier.attrib["Val"]) == normfactor_config.get("inits", [1.0])[0] assert ( - float(modifier.attrib['Low']) - == normfactor_config.get('bounds', [[0.0, 10.0]])[0][0] + float(modifier.attrib["Low"]) + == normfactor_config.get("bounds", [[0.0, 10.0]])[0][0] ) assert ( - float(modifier.attrib['High']) - == normfactor_config.get('bounds', [[0.0, 10.0]])[0][1] + float(modifier.attrib["High"]) + == normfactor_config.get("bounds", [[0.0, 10.0]])[0][1] ) @pytest.mark.parametrize( "spec", [spec_staterror(), spec_histosys(), spec_normsys(), spec_shapesys()], - ids=['staterror', 'histosys', 'normsys', 'shapesys'], + ids=["staterror", "histosys", "normsys", "shapesys"], ) def test_export_sample(mocker, spec): - channelspec = spec['channels'][0] - channelname = channelspec['name'] - samplespec = channelspec['samples'][1] + channelspec = spec["channels"][0] + channelname = channelspec["name"] + samplespec = channelspec["samples"][1] - mocker.patch('pyhf.writexml.build_modifier', return_value=ET.Element("Modifier")) - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') + mocker.patch("pyhf.writexml.build_modifier", return_value=ET.Element("Modifier")) + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") sample = pyhf.writexml.build_sample({}, samplespec, channelname) - assert sample.attrib['Name'] == samplespec['name'] - assert sample.attrib['HistoName'] - assert sample.attrib['InputFile'] - assert sample.attrib['NormalizeByTheory'] == str(False) + assert sample.attrib["Name"] == samplespec["name"] + assert sample.attrib["HistoName"] + assert sample.attrib["InputFile"] + assert sample.attrib["NormalizeByTheory"] == str(False) assert pyhf.writexml.build_modifier.called assert pyhf.writexml._ROOT_DATA_FILE.__setitem__.called @pytest.mark.parametrize( - "spec", [spec_staterror(), spec_shapesys()], ids=['staterror', 'shapesys'] + "spec", [spec_staterror(), 
spec_shapesys()], ids=["staterror", "shapesys"] ) def test_export_sample_zerodata(mocker, spec): - channelspec = spec['channels'][0] - channelname = channelspec['name'] - samplespec = channelspec['samples'][1] - samplename = samplespec['name'] - sampledata = [0.0] * len(samplespec['data']) - - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') - for modifierspec in samplespec['modifiers']: + channelspec = spec["channels"][0] + channelname = channelspec["name"] + samplespec = channelspec["samples"][1] + samplename = samplespec["name"] + sampledata = [0.0] * len(samplespec["data"]) + + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") + for modifierspec in samplespec["modifiers"]: pyhf.writexml.build_modifier( - {'measurements': [{'config': {'parameters': []}}]}, + {"measurements": [{"config": {"parameters": []}}]}, modifierspec, channelname, samplename, @@ -381,30 +381,30 @@ def test_export_sample_zerodata(mocker, spec): @pytest.mark.parametrize( "spec", [spec_staterror(), spec_histosys(), spec_normsys(), spec_shapesys()], - ids=['staterror', 'histosys', 'normsys', 'shapesys'], + ids=["staterror", "histosys", "normsys", "shapesys"], ) def test_export_channel(mocker, spec): - channelspec = spec['channels'][0] + channelspec = spec["channels"][0] - mocker.patch('pyhf.writexml.build_data', return_value=ET.Element("Data")) - mocker.patch('pyhf.writexml.build_sample', return_value=ET.Element("Sample")) - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') + mocker.patch("pyhf.writexml.build_data", return_value=ET.Element("Data")) + mocker.patch("pyhf.writexml.build_sample", return_value=ET.Element("Sample")) + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") channel = pyhf.writexml.build_channel({}, channelspec, {}) - assert channel.attrib['Name'] == channelspec['name'] - assert channel.attrib['InputFile'] + assert channel.attrib["Name"] == channelspec["name"] + assert channel.attrib["InputFile"] assert pyhf.writexml.build_data.called is False assert pyhf.writexml.build_sample.called assert pyhf.writexml._ROOT_DATA_FILE.__setitem__.called is False def test_export_data(mocker): - channelname = 'channel' - dataspec = [{'name': channelname, 'data': [0, 1, 2, 3]}] + channelname = "channel" + dataspec = [{"name": channelname, "data": [0, 1, 2, 3]}] - mocker.patch('pyhf.writexml._ROOT_DATA_FILE') + mocker.patch("pyhf.writexml._ROOT_DATA_FILE") data = pyhf.writexml.build_data(dataspec, channelname) - assert data.attrib['HistoName'] - assert data.attrib['InputFile'] + assert data.attrib["HistoName"] + assert data.attrib["InputFile"] assert pyhf.writexml._ROOT_DATA_FILE.__setitem__.called @@ -429,10 +429,10 @@ def test_export_root_histogram(mocker, tmp_path): def test_export_duplicate_hist_name(mocker): - mocker.patch('pyhf.writexml._ROOT_DATA_FILE', new={'duplicate_name': True}) + mocker.patch("pyhf.writexml._ROOT_DATA_FILE", new={"duplicate_name": True}) with pytest.raises(KeyError): - pyhf.writexml._export_root_histogram('duplicate_name', [0, 1, 2]) + pyhf.writexml._export_root_histogram("duplicate_name", [0, 1, 2]) def test_integer_data(datadir, mocker): @@ -453,16 +453,16 @@ def test_integer_data(datadir, mocker): @pytest.mark.parametrize( "fname,val,low,high", [ - ('workspace_no_parameter_inits.json', '1', '-5', '5'), - ('workspace_no_parameter_bounds.json', '5', '0', '10'), + ("workspace_no_parameter_inits.json", "1", "-5", "5"), + ("workspace_no_parameter_bounds.json", "5", "0", "10"), ], - ids=['no_inits', 'no_bounds'], + ids=["no_inits", "no_bounds"], ) def test_issue1814(datadir, mocker, fname, val, low, high): 
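    # Review aside: the two workspaces parametrized above each omit one
    # normfactor setting, so the assertions below pin down the fallbacks in
    # build_modifier: missing `inits` default to Val="1" (with bounds taken
    # from the workspace), and missing `bounds` default to Low="0", High="10".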
with open(datadir / fname, encoding="utf-8") as spec_file: spec = json.load(spec_file) - modifierspec = {'data': None, 'name': 'mu_sig', 'type': 'normfactor'} + modifierspec = {"data": None, "name": "mu_sig", "type": "normfactor"} channelname = None samplename = None sampledata = None @@ -471,7 +471,7 @@ def test_issue1814(datadir, mocker, fname, val, low, high): spec, modifierspec, channelname, samplename, sampledata ) assert modifier is not None - assert sorted(modifier.keys()) == ['High', 'Low', 'Name', 'Val'] - assert modifier.get('Val') == val - assert modifier.get('Low') == low - assert modifier.get('High') == high + assert sorted(modifier.keys()) == ["High", "Low", "Name", "Val"] + assert modifier.get("Val") == val + assert modifier.get("Low") == low + assert modifier.get("High") == high diff --git a/tests/test_import.py b/tests/test_import.py index 8204d79ab5..1d121ba74d 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -23,11 +23,11 @@ def assert_equal_dictionary(d1, d2): def test_dedupe_parameters(): parameters = [ - {'name': 'SigXsecOverSM', 'bounds': [[0.0, 10.0]]}, - {'name': 'SigXsecOverSM', 'bounds': [[0.0, 10.0]]}, + {"name": "SigXsecOverSM", "bounds": [[0.0, 10.0]]}, + {"name": "SigXsecOverSM", "bounds": [[0.0, 10.0]]}, ] assert len(pyhf.readxml.dedupe_parameters(parameters)) == 1 - parameters[1]['bounds'] = [[0.0, 2.0]] + parameters[1]["bounds"] = [[0.0, 2.0]] with pytest.raises(RuntimeError, match="SigXsecOverSM"): pyhf.readxml.dedupe_parameters(parameters) @@ -40,74 +40,74 @@ def test_process_normfactor_configs(): toplvl = ET.Element("Combination") meas = ET.Element( "Measurement", - Name='NormalMeasurement', + Name="NormalMeasurement", Lumi=str(1.0), LumiRelErr=str(0.017), ExportOnly=str(True), ) - poiel = ET.Element('POI') - poiel.text = 'mu_SIG' + poiel = ET.Element("POI") + poiel.text = "mu_SIG" meas.append(poiel) - setting = ET.Element('ParamSetting', Const='True') - setting.text = ' '.join(['Lumi', 'alpha_mu_both', 'alpha_mu_paramSettingOnly']) + setting = ET.Element("ParamSetting", Const="True") + setting.text = " ".join(["Lumi", "alpha_mu_both", "alpha_mu_paramSettingOnly"]) meas.append(setting) - setting = ET.Element('ParamSetting', Val='2.0') - setting.text = ' '.join(['alpha_mu_both']) + setting = ET.Element("ParamSetting", Val="2.0") + setting.text = " ".join(["alpha_mu_both"]) meas.append(setting) toplvl.append(meas) meas = ET.Element( "Measurement", - Name='ParallelMeasurement', + Name="ParallelMeasurement", Lumi=str(1.0), LumiRelErr=str(0.017), ExportOnly=str(True), ) - poiel = ET.Element('POI') - poiel.text = 'mu_BKG' + poiel = ET.Element("POI") + poiel.text = "mu_BKG" meas.append(poiel) - setting = ET.Element('ParamSetting', Val='3.0') - setting.text = ' '.join(['alpha_mu_both']) + setting = ET.Element("ParamSetting", Val="3.0") + setting.text = " ".join(["alpha_mu_both"]) meas.append(setting) toplvl.append(meas) other_parameter_configs = [ - dict(name='mu_both', inits=[1.0], bounds=[[1.0, 5.0]], fixed=False), - dict(name='mu_otherConfigOnly', inits=[1.0], bounds=[[0.0, 10.0]], fixed=False), + dict(name="mu_both", inits=[1.0], bounds=[[1.0, 5.0]], fixed=False), + dict(name="mu_otherConfigOnly", inits=[1.0], bounds=[[0.0, 10.0]], fixed=False), ] result = pyhf.readxml.process_measurements( toplvl, other_parameter_configs=other_parameter_configs ) result = { - m['name']: {k['name']: k for k in m['config']['parameters']} for m in result + m["name"]: {k["name"]: k for k in m["config"]["parameters"]} for m in result } assert result # make sure 
ParamSetting configs override NormFactor configs - assert result['NormalMeasurement']['mu_both']['fixed'] - assert result['NormalMeasurement']['mu_both']['inits'] == [2.0] - assert result['NormalMeasurement']['mu_both']['bounds'] == [[1.0, 5.0]] + assert result["NormalMeasurement"]["mu_both"]["fixed"] + assert result["NormalMeasurement"]["mu_both"]["inits"] == [2.0] + assert result["NormalMeasurement"]["mu_both"]["bounds"] == [[1.0, 5.0]] # make sure ParamSetting is doing the right thing - assert result['NormalMeasurement']['mu_paramSettingOnly']['fixed'] - assert 'inits' not in result['NormalMeasurement']['mu_paramSettingOnly'] - assert 'bounds' not in result['NormalMeasurement']['mu_paramSettingOnly'] + assert result["NormalMeasurement"]["mu_paramSettingOnly"]["fixed"] + assert "inits" not in result["NormalMeasurement"]["mu_paramSettingOnly"] + assert "bounds" not in result["NormalMeasurement"]["mu_paramSettingOnly"] # make sure our code doesn't accidentally override other parameter configs - assert not result['NormalMeasurement']['mu_otherConfigOnly']['fixed'] - assert result['NormalMeasurement']['mu_otherConfigOnly']['inits'] == [1.0] - assert result['NormalMeasurement']['mu_otherConfigOnly']['bounds'] == [[0.0, 10.0]] + assert not result["NormalMeasurement"]["mu_otherConfigOnly"]["fixed"] + assert result["NormalMeasurement"]["mu_otherConfigOnly"]["inits"] == [1.0] + assert result["NormalMeasurement"]["mu_otherConfigOnly"]["bounds"] == [[0.0, 10.0]] # make sure settings from one measurement don't leak to other - assert not result['ParallelMeasurement']['mu_both']['fixed'] - assert result['ParallelMeasurement']['mu_both']['inits'] == [3.0] - assert result['ParallelMeasurement']['mu_both']['bounds'] == [[1.0, 5.0]] + assert not result["ParallelMeasurement"]["mu_both"]["fixed"] + assert result["ParallelMeasurement"]["mu_both"]["inits"] == [3.0] + assert result["ParallelMeasurement"]["mu_both"]["bounds"] == [[1.0, 5.0]] def test_import_histogram(): @@ -133,74 +133,74 @@ def test_import_histogram_KeyError(): def test_import_measurements(): parsed_xml = pyhf.readxml.parse( - 'validation/xmlimport_input/config/example.xml', 'validation/xmlimport_input/' + "validation/xmlimport_input/config/example.xml", "validation/xmlimport_input/" ) - measurements = parsed_xml['measurements'] + measurements = parsed_xml["measurements"] assert len(measurements) == 4 - measurement_configs = measurements[0]['config'] + measurement_configs = measurements[0]["config"] - assert 'parameters' in measurement_configs - assert len(measurement_configs['parameters']) == 3 - parnames = [p['name'] for p in measurement_configs['parameters']] - assert sorted(parnames) == sorted(['lumi', 'SigXsecOverSM', 'syst1']) + assert "parameters" in measurement_configs + assert len(measurement_configs["parameters"]) == 3 + parnames = [p["name"] for p in measurement_configs["parameters"]] + assert sorted(parnames) == sorted(["lumi", "SigXsecOverSM", "syst1"]) - lumi_param_config = measurement_configs['parameters'][0] - assert 'auxdata' in lumi_param_config - assert lumi_param_config['auxdata'] == [1.0] - assert 'bounds' in lumi_param_config - assert lumi_param_config['bounds'] == [[0.5, 1.5]] - assert 'inits' in lumi_param_config - assert lumi_param_config['inits'] == [1.0] - assert 'sigmas' in lumi_param_config - assert lumi_param_config['sigmas'] == [0.1] + lumi_param_config = measurement_configs["parameters"][0] + assert "auxdata" in lumi_param_config + assert lumi_param_config["auxdata"] == [1.0] + assert "bounds" in 
lumi_param_config + assert lumi_param_config["bounds"] == [[0.5, 1.5]] + assert "inits" in lumi_param_config + assert lumi_param_config["inits"] == [1.0] + assert "sigmas" in lumi_param_config + assert lumi_param_config["sigmas"] == [0.1] -@pytest.mark.parametrize("const", ['False', 'True']) +@pytest.mark.parametrize("const", ["False", "True"]) def test_spaces_in_measurement_config(const): toplvl = ET.Element("Combination") meas = ET.Element( "Measurement", - Name='NormalMeasurement', + Name="NormalMeasurement", Lumi=str(1.0), LumiRelErr=str(0.017), ExportOnly=str(True), ) - poiel = ET.Element('POI') - poiel.text = 'mu_SIG ' # space + poiel = ET.Element("POI") + poiel.text = "mu_SIG " # space meas.append(poiel) - setting = ET.Element('ParamSetting', Const=const) - setting.text = ' '.join(['Lumi', 'alpha_mu_both']) + ' ' # spacces + setting = ET.Element("ParamSetting", Const=const) + setting.text = " ".join(["Lumi", "alpha_mu_both"]) + " " # spacces meas.append(setting) toplvl.append(meas) meas_json = pyhf.readxml.process_measurements(toplvl)[0] - assert meas_json['config']['poi'] == 'mu_SIG' - assert [x['name'] for x in meas_json['config']['parameters']] == ['lumi', 'mu_both'] + assert meas_json["config"]["poi"] == "mu_SIG" + assert [x["name"] for x in meas_json["config"]["parameters"]] == ["lumi", "mu_both"] -@pytest.mark.parametrize("const", ['False', 'True']) +@pytest.mark.parametrize("const", ["False", "True"]) def test_import_measurement_gamma_bins(const): toplvl = ET.Element("Combination") meas = ET.Element( "Measurement", - Name='NormalMeasurement', + Name="NormalMeasurement", Lumi=str(1.0), LumiRelErr=str(0.017), ExportOnly=str(True), ) - poiel = ET.Element('POI') - poiel.text = 'mu_SIG' + poiel = ET.Element("POI") + poiel.text = "mu_SIG" meas.append(poiel) - setting = ET.Element('ParamSetting', Const=const) - setting.text = ' '.join(['Lumi', 'alpha_mu_both', 'gamma_bin_0']) + setting = ET.Element("ParamSetting", Const=const) + setting.text = " ".join(["Lumi", "alpha_mu_both", "gamma_bin_0"]) meas.append(setting) - setting = ET.Element('ParamSetting', Val='2.0') - setting.text = ' '.join(['gamma_bin_0']) + setting = ET.Element("ParamSetting", Val="2.0") + setting.text = " ".join(["gamma_bin_0"]) meas.append(setting) toplvl.append(meas) @@ -213,38 +213,38 @@ def test_import_measurement_gamma_bins(const): "configfile,rootdir", [ ( - 'validation/xmlimport_input/config/example.xml', - 'validation/xmlimport_input/', + "validation/xmlimport_input/config/example.xml", + "validation/xmlimport_input/", ), ( - 'validation/xmlimport_input4/config/example.xml', - 'validation/xmlimport_input4/', + "validation/xmlimport_input4/config/example.xml", + "validation/xmlimport_input4/", ), ], - ids=['xmlimport_input', 'xmlimport_input_histoPath'], + ids=["xmlimport_input", "xmlimport_input_histoPath"], ) def test_import_prepHistFactory(configfile, rootdir): parsed_xml = pyhf.readxml.parse(configfile, rootdir) # build the spec, strictly checks properties included spec = { - 'channels': parsed_xml['channels'], - 'parameters': parsed_xml['measurements'][0]['config']['parameters'], + "channels": parsed_xml["channels"], + "parameters": parsed_xml["measurements"][0]["config"]["parameters"], } - pdf = pyhf.Model(spec, poi_name='SigXsecOverSM') + pdf = pyhf.Model(spec, poi_name="SigXsecOverSM") data = [ binvalue - for k in pdf.spec['channels'] + for k in pdf.spec["channels"] for binvalue in next( - obs for obs in parsed_xml['observations'] if obs['name'] == k['name'] - )['data'] + obs for obs in 
parsed_xml['observations'] if obs['name'] == k['name']
-        )['data']
+            obs for obs in parsed_xml["observations"] if obs["name"] == k["name"]
+        )["data"]
     ] + pdf.config.auxdata

-    channels = {channel['name'] for channel in pdf.spec['channels']}
+    channels = {channel["name"] for channel in pdf.spec["channels"]}
     samples = {
-        channel['name']: [sample['name'] for sample in channel['samples']]
-        for channel in pdf.spec['channels']
+        channel["name"]: [sample["name"] for sample in channel["samples"]]
+        for channel in pdf.spec["channels"]
     }

     ###
@@ -253,21 +253,21 @@ def test_import_prepHistFactory(configfile, rootdir):
     # bkg2 stateror (2 bins)
     # bkg2 overallsys

-    assert 'channel1' in channels
-    assert 'signal' in samples['channel1']
-    assert 'background1' in samples['channel1']
-    assert 'background2' in samples['channel1']
+    assert "channel1" in channels
+    assert "signal" in samples["channel1"]
+    assert "background1" in samples["channel1"]
+    assert "background2" in samples["channel1"]

-    assert pdf.spec['channels'][0]['samples'][1]['modifiers'][0]['type'] == 'lumi'
-    assert pdf.spec['channels'][0]['samples'][2]['modifiers'][0]['type'] == 'lumi'
+    assert pdf.spec["channels"][0]["samples"][1]["modifiers"][0]["type"] == "lumi"
+    assert pdf.spec["channels"][0]["samples"][2]["modifiers"][0]["type"] == "lumi"

-    assert pdf.spec['channels'][0]['samples'][2]['modifiers'][1]['type'] == 'staterror'
-    assert pdf.spec['channels'][0]['samples'][2]['modifiers'][1]['data'] == [0, 10.0]
+    assert pdf.spec["channels"][0]["samples"][2]["modifiers"][1]["type"] == "staterror"
+    assert pdf.spec["channels"][0]["samples"][2]["modifiers"][1]["data"] == [0, 10.0]

-    assert pdf.spec['channels'][0]['samples'][1]['modifiers'][1]['type'] == 'staterror'
+    assert pdf.spec["channels"][0]["samples"][1]["modifiers"][1]["type"] == "staterror"
     assert all(
         np.isclose(
-            pdf.spec['channels'][0]['samples'][1]['modifiers'][1]['data'], [5.0, 0.0]
+            pdf.spec["channels"][0]["samples"][1]["modifiers"][1]["data"], [5.0, 0.0]
         )
     )

@@ -276,36 +276,36 @@ def test_import_prepHistFactory(configfile, rootdir):
     ).tolist() == [120.0, 110.0]

     assert pdf.config.auxdata_order == [
-        'lumi',
-        'syst2',
-        'syst3',
-        'syst1',
-        'staterror_channel1',
+        "lumi",
+        "syst2",
+        "syst3",
+        "syst1",
+        "staterror_channel1",
     ]

     assert data == [122.0, 112.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0]

     pars = pdf.config.suggested_init()
-    pars[pdf.config.par_slice('SigXsecOverSM')] = [2.0]
+    pars[pdf.config.par_slice("SigXsecOverSM")] = [2.0]
     assert pdf.expected_data(pars, include_auxdata=False).tolist() == [140, 120]


 def test_import_histosys():
     parsed_xml = pyhf.readxml.parse(
-        'validation/xmlimport_input2/config/example.xml', 'validation/xmlimport_input2'
+        "validation/xmlimport_input2/config/example.xml", "validation/xmlimport_input2"
     )

     # build the spec, strictly checks properties included
     spec = {
-        'channels': parsed_xml['channels'],
-        'parameters': parsed_xml['measurements'][0]['config']['parameters'],
+        "channels": parsed_xml["channels"],
+        "parameters": parsed_xml["measurements"][0]["config"]["parameters"],
     }
-    pdf = pyhf.Model(spec, poi_name='SigXsecOverSM')
+    pdf = pyhf.Model(spec, poi_name="SigXsecOverSM")

-    channels = {channel['name']: channel for channel in pdf.spec['channels']}
+    channels = {channel["name"]: channel for channel in pdf.spec["channels"]}

-    assert channels['channel2']['samples'][0]['modifiers'][0]['type'] == 'lumi'
-    assert channels['channel2']['samples'][0]['modifiers'][1]['type'] == 'histosys'
+    assert channels["channel2"]["samples"][0]["modifiers"][0]["type"] == "lumi"
+    assert channels["channel2"]["samples"][0]["modifiers"][1]["type"] == "histosys"


 def test_import_filecache(mocker):
@@ -314,12 +314,12 @@ def test_import_filecache(mocker):
     pyhf.readxml.clear_filecache()

     parsed_xml = pyhf.readxml.parse(
-        'validation/xmlimport_input/config/example.xml', 'validation/xmlimport_input/'
+        "validation/xmlimport_input/config/example.xml", "validation/xmlimport_input/"
     )

     # call a second time (file should be cached now)
     parsed_xml2 = pyhf.readxml.parse(
-        'validation/xmlimport_input/config/example.xml', 'validation/xmlimport_input/'
+        "validation/xmlimport_input/config/example.xml", "validation/xmlimport_input/"
     )

     # check if uproot.open was only called once with the expected root file
@@ -332,90 +332,90 @@ def test_import_filecache(mocker):

 def test_import_shapesys():
     parsed_xml = pyhf.readxml.parse(
-        'validation/xmlimport_input3/config/examples/example_ShapeSys.xml',
-        'validation/xmlimport_input3',
+        "validation/xmlimport_input3/config/examples/example_ShapeSys.xml",
+        "validation/xmlimport_input3",
     )

     # build the spec, strictly checks properties included
     spec = {
-        'channels': parsed_xml['channels'],
-        'parameters': parsed_xml['measurements'][0]['config']['parameters'],
+        "channels": parsed_xml["channels"],
+        "parameters": parsed_xml["measurements"][0]["config"]["parameters"],
     }
-    pdf = pyhf.Model(spec, poi_name='SigXsecOverSM')
+    pdf = pyhf.Model(spec, poi_name="SigXsecOverSM")

-    channels = {channel['name']: channel for channel in pdf.spec['channels']}
+    channels = {channel["name"]: channel for channel in pdf.spec["channels"]}

-    assert channels['channel1']['samples'][1]['modifiers'][0]['type'] == 'lumi'
-    assert channels['channel1']['samples'][1]['modifiers'][1]['type'] == 'shapesys'
+    assert channels["channel1"]["samples"][1]["modifiers"][0]["type"] == "lumi"
+    assert channels["channel1"]["samples"][1]["modifiers"][1]["type"] == "shapesys"

     # NB: assert that relative uncertainty is converted to absolute uncertainty for shapesys
-    assert channels['channel1']['samples'][1]['data'] == pytest.approx([100.0, 1.0e-4])
-    assert channels['channel1']['samples'][1]['modifiers'][1]['data'] == pytest.approx(
+    assert channels["channel1"]["samples"][1]["data"] == pytest.approx([100.0, 1.0e-4])
+    assert channels["channel1"]["samples"][1]["modifiers"][1]["data"] == pytest.approx(
         [10.0, 1.5e-5]
     )


 def test_import_normfactor_bounds():
     parsed_xml = pyhf.readxml.parse(
-        'validation/xmlimport_input2/config/example.xml', 'validation/xmlimport_input2'
+        "validation/xmlimport_input2/config/example.xml", "validation/xmlimport_input2"
     )

     ws = pyhf.Workspace(parsed_xml)
-    assert ('SigXsecOverSM', 'normfactor') in ws.modifiers
+    assert ("SigXsecOverSM", "normfactor") in ws.modifiers
     parameters = [
         p
-        for p in ws.get_measurement(measurement_name='GaussExample')['config'][
-            'parameters'
+        for p in ws.get_measurement(measurement_name="GaussExample")["config"][
+            "parameters"
         ]
-        if p['name'] == 'SigXsecOverSM'
+        if p["name"] == "SigXsecOverSM"
     ]
     assert len(parameters) == 1
     parameter = parameters[0]
-    assert parameter['bounds'] == [[0, 10]]
+    assert parameter["bounds"] == [[0, 10]]


 def test_import_shapefactor():
     parsed_xml = pyhf.readxml.parse(
-        'validation/xmlimport_input/config/examples/example_DataDriven.xml',
-        'validation/xmlimport_input',
+        "validation/xmlimport_input/config/examples/example_DataDriven.xml",
+        "validation/xmlimport_input",
     )

     # build the spec, strictly checks properties included
     spec = {
-        'channels': parsed_xml['channels'],
-        'parameters': parsed_xml['measurements'][0]['config']['parameters'],
+        "channels": parsed_xml["channels"],
+        "parameters": parsed_xml["measurements"][0]["config"]["parameters"],
     }
-    pdf = pyhf.Model(spec, poi_name='SigXsecOverSM')
+    pdf = pyhf.Model(spec, poi_name="SigXsecOverSM")

-    channels = {channel['name']: channel for channel in pdf.spec['channels']}
+    channels = {channel["name"]: channel for channel in pdf.spec["channels"]}

-    assert channels['controlRegion']['samples'][0]['modifiers'][0]['type'] == 'lumi'
+    assert channels["controlRegion"]["samples"][0]["modifiers"][0]["type"] == "lumi"
     assert (
-        channels['controlRegion']['samples'][0]['modifiers'][1]['type'] == 'staterror'
+        channels["controlRegion"]["samples"][0]["modifiers"][1]["type"] == "staterror"
     )
-    assert channels['controlRegion']['samples'][0]['modifiers'][2]['type'] == 'normsys'
+    assert channels["controlRegion"]["samples"][0]["modifiers"][2]["type"] == "normsys"
     assert (
-        channels['controlRegion']['samples'][1]['modifiers'][0]['type'] == 'shapefactor'
+        channels["controlRegion"]["samples"][1]["modifiers"][0]["type"] == "shapefactor"
     )


 def test_process_modifiers(mocker, caplog):
     sample = ET.Element(
-        "Sample", Name='testSample', HistoPath="", HistoName="testSample"
+        "Sample", Name="testSample", HistoPath="", HistoName="testSample"
     )
     normfactor = ET.Element(
-        'NormFactor', Name="myNormFactor", Val='1', Low="0", High="3"
+        "NormFactor", Name="myNormFactor", Val="1", Low="0", High="3"
     )
     histosys = ET.Element(
-        'HistoSys', Name='myHistoSys', HistoNameHigh='', HistoNameLow=''
+        "HistoSys", Name="myHistoSys", HistoNameHigh="", HistoNameLow=""
     )
-    normsys = ET.Element('OverallSys', Name='myNormSys', High='1.05', Low='0.95')
-    shapesys = ET.Element('ShapeSys', Name='myShapeSys', HistoName='')
+    normsys = ET.Element("OverallSys", Name="myNormSys", High="1.05", Low="0.95")
+    shapesys = ET.Element("ShapeSys", Name="myShapeSys", HistoName="")
     shapefactor = ET.Element(
         "ShapeFactor",
-        Name='myShapeFactor',
+        Name="myShapeFactor",
     )
-    staterror = ET.Element('StatError', Activate='True')
-    unknown_modifier = ET.Element('UnknownSys')
+    staterror = ET.Element("StatError", Activate="True")
+    unknown_modifier = ET.Element("UnknownSys")

     sample.append(normfactor)
     sample.append(histosys)
@@ -427,56 +427,56 @@ def test_process_modifiers(mocker, caplog):
     _data = [0.0]
     _err = [1.0]

-    mocker.patch('pyhf.readxml.import_root_histogram', return_value=(_data, _err))
-    with caplog.at_level(logging.DEBUG, 'pyhf.readxml'):
-        result = pyhf.readxml.process_sample(sample, '', '', '', 'myChannel')
+    mocker.patch("pyhf.readxml.import_root_histogram", return_value=(_data, _err))
+    with caplog.at_level(logging.DEBUG, "pyhf.readxml"):
+        result = pyhf.readxml.process_sample(sample, "", "", "", "myChannel")
         assert "not considering modifier tag <Element 'UnknownSys'" in caplog.text
[...]
diff --git a/tests/test_interpolate.py b/tests/test_interpolate.py
[...]
-        'sa,shb->shab',
+        "sa,shb->shab",
         pyhf.tensorlib.ones(pyhf.tensorlib.shape(alphasets)),
         histogramssets[:, :, 1],
     )
@@ -169,7 +169,7 @@ def test_code0_validation(backend, do_tensorized_calc):
     )


-@pytest.mark.parametrize("do_tensorized_calc", [False, True], ids=['slow', 'fast'])
+@pytest.mark.parametrize("do_tensorized_calc", [False, True], ids=["slow", "fast"])
 def test_code1_validation(backend, do_tensorized_calc):
     histogramssets = [[[[0.9], [1.0], [1.1]]]]
     alphasets = pyhf.tensorlib.astensor([[-2, -1, 0, 1, 2]])
@@ -183,7 +183,7 @@
     # calculate the actual change
     histogramssets = pyhf.tensorlib.astensor(histogramssets)
     allsets_allhistos_noms_repeated = pyhf.tensorlib.einsum(
-        'sa,shb->shab',
+        "sa,shb->shab",
         pyhf.tensorlib.ones(pyhf.tensorlib.shape(alphasets)),
         histogramssets[:,
:, 1], ) @@ -197,7 +197,7 @@ def test_code1_validation(backend, do_tensorized_calc): def test_invalid_interpcode(): with pytest.raises(pyhf.exceptions.InvalidInterpCode): - pyhf.interpolators.get('fake') + pyhf.interpolators.get("fake") with pytest.raises(pyhf.exceptions.InvalidInterpCode): pyhf.interpolators.get(1.2) diff --git a/tests/test_jit.py b/tests/test_jit.py index bc1cf2f9ce..888e6b5cce 100644 --- a/tests/test_jit.py +++ b/tests/test_jit.py @@ -6,17 +6,17 @@ @pytest.mark.parametrize( - 'return_fitted_val', [False, True], ids=['no_fitval', 'do_fitval'] + "return_fitted_val", [False, True], ids=["no_fitval", "do_fitval"] ) -@pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) -@pytest.mark.parametrize('do_grad', [False, True], ids=['no_grad', 'do_grad']) -@pytest.mark.parametrize('optimizer', ['scipy', 'minuit']) +@pytest.mark.parametrize("do_stitch", [False, True], ids=["no_stitch", "do_stitch"]) +@pytest.mark.parametrize("do_grad", [False, True], ids=["no_grad", "do_grad"]) +@pytest.mark.parametrize("optimizer", ["scipy", "minuit"]) def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): pyhf.set_backend("jax", optimizer, precision="64b") pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata) - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -25,10 +25,10 @@ def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 2.0, data, @@ -37,9 +37,9 @@ def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' not in caplog.text + assert "jitting function" not in caplog.text - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fit( data, pdf, @@ -47,10 +47,10 @@ def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fit( data, pdf, @@ -58,9 +58,9 @@ def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' not in caplog.text + assert "jitting function" not in caplog.text - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 3.0, data, @@ -69,20 +69,20 @@ def test_jax_jit(caplog, optimizer, do_grad, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' not in caplog.text + assert "jitting function" not in caplog.text @pytest.mark.parametrize( - 'return_fitted_val', [False, True], ids=['no_fitval', 
'do_fitval'] + "return_fitted_val", [False, True], ids=["no_fitval", "do_fitval"] ) -@pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) -@pytest.mark.parametrize('do_grad', [False, True], ids=['no_grad', 'do_grad']) +@pytest.mark.parametrize("do_stitch", [False, True], ids=["no_stitch", "do_stitch"]) +@pytest.mark.parametrize("do_grad", [False, True], ids=["no_grad", "do_grad"]) def test_jax_jit_switch_optimizer(caplog, do_grad, do_stitch, return_fitted_val): pyhf.set_backend("jax", "scipy", precision="64b") pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata) - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -91,11 +91,11 @@ def test_jax_jit_switch_optimizer(caplog, do_grad, do_stitch, return_fitted_val) do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() - pyhf.set_backend(pyhf.tensorlib, 'minuit') - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + pyhf.set_backend(pyhf.tensorlib, "minuit") + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 2.0, data, @@ -104,9 +104,9 @@ def test_jax_jit_switch_optimizer(caplog, do_grad, do_stitch, return_fitted_val) do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' not in caplog.text + assert "jitting function" not in caplog.text - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fit( data, pdf, @@ -114,11 +114,11 @@ def test_jax_jit_switch_optimizer(caplog, do_grad, do_stitch, return_fitted_val) do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() - pyhf.set_backend(pyhf.tensorlib, 'scipy') - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + pyhf.set_backend(pyhf.tensorlib, "scipy") + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fit( data, pdf, @@ -126,19 +126,19 @@ def test_jax_jit_switch_optimizer(caplog, do_grad, do_stitch, return_fitted_val) do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' not in caplog.text + assert "jitting function" not in caplog.text @pytest.mark.parametrize( - 'return_fitted_val', [False, True], ids=['no_fitval', 'do_fitval'] + "return_fitted_val", [False, True], ids=["no_fitval", "do_fitval"] ) -@pytest.mark.parametrize('do_grad', [False, True], ids=['no_grad', 'do_grad']) +@pytest.mark.parametrize("do_grad", [False, True], ids=["no_grad", "do_grad"]) def test_jax_jit_enable_stitching(caplog, do_grad, return_fitted_val): pyhf.set_backend("jax", "scipy", precision="64b") pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata) - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -147,10 +147,10 @@ def test_jax_jit_enable_stitching(caplog, do_grad, return_fitted_val): do_stitch=False, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" 
in caplog.text caplog.clear() - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -159,20 +159,20 @@ def test_jax_jit_enable_stitching(caplog, do_grad, return_fitted_val): do_stitch=True, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() @pytest.mark.parametrize( - 'return_fitted_val', [False, True], ids=['no_fitval', 'do_fitval'] + "return_fitted_val", [False, True], ids=["no_fitval", "do_fitval"] ) -@pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) +@pytest.mark.parametrize("do_stitch", [False, True], ids=["no_stitch", "do_stitch"]) def test_jax_jit_enable_autograd(caplog, do_stitch, return_fitted_val): pyhf.set_backend("jax", "scipy", precision="64b") pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + pdf.config.auxdata) - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -181,10 +181,10 @@ def test_jax_jit_enable_autograd(caplog, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() - with caplog.at_level(logging.DEBUG, 'pyhf.optimize.opt_jax'): + with caplog.at_level(logging.DEBUG, "pyhf.optimize.opt_jax"): pyhf.infer.mle.fixed_poi_fit( 1.0, data, @@ -193,5 +193,5 @@ def test_jax_jit_enable_autograd(caplog, do_stitch, return_fitted_val): do_stitch=do_stitch, return_fitted_val=return_fitted_val, ) # jit - assert 'jitting function' in caplog.text + assert "jitting function" in caplog.text caplog.clear() diff --git a/tests/test_mixins.py b/tests/test_mixins.py index 99e304af8a..6f3018f7f8 100644 --- a/tests/test_mixins.py +++ b/tests/test_mixins.py @@ -5,30 +5,30 @@ @pytest.fixture( - scope='session', + scope="session", params=[ - ('validation/xmlimport_input/config/example.xml', 'validation/xmlimport_input/') + ("validation/xmlimport_input/config/example.xml", "validation/xmlimport_input/") ], - ids=['example-one'], + ids=["example-one"], ) def spec(request): return pyhf.readxml.parse(*request.param) def test_channel_summary_mixin(spec): - assert 'channels' in spec - mixin = pyhf.mixins._ChannelSummaryMixin(channels=spec['channels']) - assert mixin.channel_nbins == {'channel1': 2} - assert mixin.channels == ['channel1'] + assert "channels" in spec + mixin = pyhf.mixins._ChannelSummaryMixin(channels=spec["channels"]) + assert mixin.channel_nbins == {"channel1": 2} + assert mixin.channels == ["channel1"] assert mixin.modifiers == [ - ('SigXsecOverSM', 'normfactor'), - ('lumi', 'lumi'), - ('staterror_channel1', 'staterror'), - ('syst1', 'normsys'), - ('syst2', 'normsys'), - ('syst3', 'normsys'), + ("SigXsecOverSM", "normfactor"), + ("lumi", "lumi"), + ("staterror_channel1", "staterror"), + ("syst1", "normsys"), + ("syst2", "normsys"), + ("syst3", "normsys"), ] - assert mixin.samples == ['background1', 'background2', 'signal'] + assert mixin.samples == ["background1", "background2", "signal"] def test_channel_summary_mixin_empty(): diff --git a/tests/test_modifiers.py b/tests/test_modifiers.py index 52b453d6a8..f23aa3d65b 100644 --- a/tests/test_modifiers.py +++ b/tests/test_modifiers.py @@ -9,22 +9,22 @@ def 
test_shapefactor_build(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0] * 3, - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None}, + "name": "sample", + "data": [10.0] * 3, + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None}, ], }, { - 'name': 'another_sample', - 'data': [5.0] * 3, - 'modifiers': [ - {'name': 'freeshape', 'type': 'shapefactor', 'data': None} + "name": "another_sample", + "data": [5.0] * 3, + "modifiers": [ + {"name": "freeshape", "type": "shapefactor", "data": None} ], }, ], @@ -38,35 +38,35 @@ def test_shapefactor_build(): def test_staterror_holes(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel1', - 'samples': [ + "name": "channel1", + "samples": [ { - 'name': 'another_sample', - 'data': [50, 0, 0, 70], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None}, + "name": "another_sample", + "data": [50, 0, 0, 70], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None}, { - 'name': 'staterror_1', - 'type': 'staterror', - 'data': [5, 0, 5, 5], + "name": "staterror_1", + "type": "staterror", + "data": [5, 0, 5, 5], }, ], }, ], }, { - 'name': 'channel2', - 'samples': [ + "name": "channel2", + "samples": [ { - 'name': 'another_sample', - 'data': [50, 0, 10, 70], - 'modifiers': [ + "name": "another_sample", + "data": [50, 0, 10, 70], + "modifiers": [ { - 'name': 'staterror_2', - 'type': 'staterror', - 'data': [5, 0, 5, 5], + "name": "staterror_2", + "type": "staterror", + "data": [5, 0, 5, 5], } ], }, @@ -113,35 +113,35 @@ def test_staterror_holes(): def test_shapesys_holes(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel1', - 'samples': [ + "name": "channel1", + "samples": [ { - 'name': 'another_sample', - 'data': [50, 60, 0, 70], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None}, + "name": "another_sample", + "data": [50, 60, 0, 70], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None}, { - 'name': 'freeshape1', - 'type': 'shapesys', - 'data': [5, 0, 5, 5], + "name": "freeshape1", + "type": "shapesys", + "data": [5, 0, 5, 5], }, ], }, ], }, { - 'name': 'channel2', - 'samples': [ + "name": "channel2", + "samples": [ { - 'name': 'another_sample', - 'data': [50, 60, 0, 70], - 'modifiers': [ + "name": "another_sample", + "data": [50, 60, 0, 70], + "modifiers": [ { - 'name': 'freeshape2', - 'type': 'shapesys', - 'data': [5, 0, 5, 5], + "name": "freeshape2", + "type": "shapesys", + "data": [5, 0, 5, 5], } ], }, diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py index bc005f2201..9aeea7e515 100644 --- a/tests/test_notebooks.py +++ b/tests/test_notebooks.py @@ -12,16 +12,16 @@ @pytest.fixture() def common_kwargs(tmp_path): - outputnb = tmp_path.joinpath('output.ipynb') + outputnb = tmp_path.joinpath("output.ipynb") return { - 'output_path': str(outputnb), - 'kernel_name': f'python{sys.version_info.major}', - 'progress_bar': False, + "output_path": str(outputnb), + "kernel_name": f"python{sys.version_info.major}", + "progress_bar": False, } def test_hello_world(common_kwargs): - pm.execute_notebook('docs/examples/notebooks/hello-world.ipynb', **common_kwargs) + pm.execute_notebook("docs/examples/notebooks/hello-world.ipynb", **common_kwargs) def test_xml_importexport(common_kwargs): @@ -41,12 +41,12 @@ def test_statisticalanalysis(common_kwargs): def test_shapefactor(common_kwargs): - 
pm.execute_notebook('docs/examples/notebooks/ShapeFactor.ipynb', **common_kwargs) + pm.execute_notebook("docs/examples/notebooks/ShapeFactor.ipynb", **common_kwargs) def test_multichannel_coupled_histos(common_kwargs): pm.execute_notebook( - 'docs/examples/notebooks/multichannel-coupled-histo.ipynb', + "docs/examples/notebooks/multichannel-coupled-histo.ipynb", parameters={"validation_datadir": str(Path.cwd() / "validation" / "data")}, **common_kwargs, ) @@ -54,12 +54,12 @@ def test_multichannel_coupled_histos(common_kwargs): def test_multibinpois(common_kwargs): pm.execute_notebook( - 'docs/examples/notebooks/multiBinPois.ipynb', + "docs/examples/notebooks/multiBinPois.ipynb", parameters={"validation_datadir": str(Path.cwd() / "validation" / "data")}, **common_kwargs, ) - nb = sb.read_notebook(common_kwargs['output_path']) - assert nb.scraps['number_2d_successpoints'].data > 200 + nb = sb.read_notebook(common_kwargs["output_path"]) + assert nb.scraps["number_2d_successpoints"].data > 200 def test_pullplot(common_kwargs): @@ -77,18 +77,18 @@ def test_impactplot(common_kwargs): def test_toys(common_kwargs): - pm.execute_notebook('docs/examples/notebooks/toys.ipynb', **common_kwargs) + pm.execute_notebook("docs/examples/notebooks/toys.ipynb", **common_kwargs) def test_learn_interpolationcodes(common_kwargs): pm.execute_notebook( - 'docs/examples/notebooks/learn/InterpolationCodes.ipynb', **common_kwargs + "docs/examples/notebooks/learn/InterpolationCodes.ipynb", **common_kwargs ) def test_learn_tensorizinginterpolations(common_kwargs): pm.execute_notebook( - 'docs/examples/notebooks/learn/TensorizingInterpolations.ipynb', **common_kwargs + "docs/examples/notebooks/learn/TensorizingInterpolations.ipynb", **common_kwargs ) diff --git a/tests/test_optim.py b/tests/test_optim.py index f325554eb4..bda1aadee1 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -22,51 +22,51 @@ def rosen(x): return sum(100.0 * (x[1:] - x[:-1] ** 2.0) ** 2.0 + (1 - x[:-1]) ** 2.0) x0 = tensorlib.astensor([1.3, 0.7, 0.8, 1.9, 1.2]) - res = minimize(rosen, x0, method='SLSQP', options=dict(disp=True)) + res = minimize(rosen, x0, method="SLSQP", options=dict(disp=True)) captured = capsys.readouterr() assert "Optimization terminated successfully" in captured.out assert pytest.approx([1.0, 1.0, 1.0, 1.0, 1.0], rel=5e-5) == tensorlib.tolist(res.x) -@pytest.mark.parametrize('do_stitch', [False, True], ids=['no_stitch', 'do_stitch']) +@pytest.mark.parametrize("do_stitch", [False, True], ids=["no_stitch", "do_stitch"]) @pytest.mark.parametrize( - 'tensorlib', + "tensorlib", [ pyhf.tensor.numpy_backend, pyhf.tensor.jax_backend, ], - ids=['numpy', 'jax'], + ids=["numpy", "jax"], ) @pytest.mark.parametrize( - 'optimizer', + "optimizer", [pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer], - ids=['scipy', 'minuit'], + ids=["scipy", "minuit"], ) -@pytest.mark.parametrize('do_grad', [False, True], ids=['no_grad', 'do_grad']) +@pytest.mark.parametrize("do_grad", [False, True], ids=["no_grad", "do_grad"]) def test_minimize(tensorlib, optimizer, do_grad, do_stitch): pyhf.set_backend(tensorlib(precision="64b"), optimizer()) m = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata) # numpy does not support grad - if pyhf.tensorlib.name == 'numpy' and do_grad: + if pyhf.tensorlib.name == "numpy" and do_grad: with pytest.raises(pyhf.exceptions.Unsupported): pyhf.infer.mle.fit(data, m, do_grad=do_grad) else: - identifier = f'{"do_grad" if do_grad 
else "no_grad"}-{pyhf.optimizer.name}-{pyhf.tensorlib.name}' + identifier = f"{'do_grad' if do_grad else 'no_grad'}-{pyhf.optimizer.name}-{pyhf.tensorlib.name}" expected = { # numpy does not do grad - 'do_grad-scipy-numpy': None, - 'do_grad-minuit-numpy': None, + "do_grad-scipy-numpy": None, + "do_grad-minuit-numpy": None, # no grad, scipy, 64b - 'no_grad-scipy-numpy': [0.49998815367220306, 0.9999696999038924], - 'no_grad-scipy-jax': [0.4999880886490433, 0.9999696971774877], + "no_grad-scipy-numpy": [0.49998815367220306, 0.9999696999038924], + "no_grad-scipy-jax": [0.4999880886490433, 0.9999696971774877], # do grad, scipy, 64b - 'do_grad-scipy-jax': [0.49998837853531414, 0.9999696648069285], + "do_grad-scipy-jax": [0.49998837853531414, 0.9999696648069285], # no grad, minuit, 64b - quite consistent - 'no_grad-minuit-numpy': [0.5000493563629738, 1.0000043833598724], - 'no_grad-minuit-jax': [0.5000493563528641, 1.0000043833614634], + "no_grad-minuit-numpy": [0.5000493563629738, 1.0000043833598724], + "no_grad-minuit-jax": [0.5000493563528641, 1.0000043833614634], # do grad, minuit, 64b - 'do_grad-minuit-jax': [0.500049321731032, 1.0000044174002167], + "do_grad-minuit-jax": [0.500049321731032, 1.0000044174002167], }[identifier] result = pyhf.infer.mle.fit(data, m, do_grad=do_grad, do_stitch=do_stitch) @@ -78,15 +78,15 @@ def test_minimize(tensorlib, optimizer, do_grad, do_stitch): # check fitted parameters assert pytest.approx( expected, rel=rel_tol, abs=abs_tol - ) == pyhf.tensorlib.tolist( - result - ), f"{identifier} = {pyhf.tensorlib.tolist(result)}" + ) == pyhf.tensorlib.tolist(result), ( + f"{identifier} = {pyhf.tensorlib.tolist(result)}" + ) @pytest.mark.parametrize( - 'optimizer', + "optimizer", [OptimizerMixin, pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer], - ids=['mixin', 'scipy', 'minuit'], + ids=["mixin", "scipy", "minuit"], ) def test_optimizer_mixin_extra_kwargs(optimizer): with pytest.raises(pyhf.exceptions.Unsupported): @@ -94,9 +94,9 @@ def test_optimizer_mixin_extra_kwargs(optimizer): @pytest.mark.parametrize( - 'backend,backend_new', - itertools.permutations([('numpy', False), ('jax', True)], 2), - ids=lambda pair: f'{pair[0]}', + "backend,backend_new", + itertools.permutations([("numpy", False), ("jax", True)], 2), + ids=lambda pair: f"{pair[0]}", ) def test_minimize_do_grad_autoconfig(mocker, backend, backend_new): backend, do_grad = backend @@ -105,20 +105,20 @@ def test_minimize_do_grad_autoconfig(mocker, backend, backend_new): # patch all we need from pyhf.optimize import mixins - shim = mocker.patch.object(mixins, 'shim', return_value=({}, lambda x: True)) - mocker.patch.object(OptimizerMixin, '_internal_minimize') - mocker.patch.object(OptimizerMixin, '_internal_postprocess') + shim = mocker.patch.object(mixins, "shim", return_value=({}, lambda x: True)) + mocker.patch.object(OptimizerMixin, "_internal_minimize") + mocker.patch.object(OptimizerMixin, "_internal_postprocess") # start with first backend - pyhf.set_backend(backend, 'scipy') + pyhf.set_backend(backend, "scipy") m = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata) assert pyhf.tensorlib.default_do_grad == do_grad pyhf.infer.mle.fit(data, m) - assert shim.call_args[1]['do_grad'] == pyhf.tensorlib.default_do_grad + assert shim.call_args[1]["do_grad"] == pyhf.tensorlib.default_do_grad pyhf.infer.mle.fit(data, m, do_grad=not (pyhf.tensorlib.default_do_grad)) - assert shim.call_args[1]['do_grad'] != 
pyhf.tensorlib.default_do_grad + assert shim.call_args[1]["do_grad"] != pyhf.tensorlib.default_do_grad # now switch to new backend and see what happens pyhf.set_backend(backend_new) @@ -127,9 +127,9 @@ def test_minimize_do_grad_autoconfig(mocker, backend, backend_new): assert pyhf.tensorlib.default_do_grad == do_grad_new pyhf.infer.mle.fit(data, m) - assert shim.call_args[1]['do_grad'] == pyhf.tensorlib.default_do_grad + assert shim.call_args[1]["do_grad"] == pyhf.tensorlib.default_do_grad pyhf.infer.mle.fit(data, m, do_grad=not (pyhf.tensorlib.default_do_grad)) - assert shim.call_args[1]['do_grad'] != pyhf.tensorlib.default_do_grad + assert shim.call_args[1]["do_grad"] != pyhf.tensorlib.default_do_grad def test_minuit_strategy_do_grad(mocker, backend): @@ -141,7 +141,7 @@ def test_minuit_strategy_do_grad(mocker, backend): one automatically sets the minuit strategy=1. """ pyhf.set_backend(pyhf.tensorlib, pyhf.optimize.minuit_optimizer(tolerance=0.2)) - spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize') + spy = mocker.spy(pyhf.optimize.minuit_optimizer, "_minimize") m = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata) @@ -159,12 +159,12 @@ def test_minuit_strategy_do_grad(mocker, backend): assert spy.spy_return.minuit.strategy == 1 -@pytest.mark.parametrize('strategy', [0, 1, 2]) +@pytest.mark.parametrize("strategy", [0, 1, 2]) def test_minuit_strategy_global(mocker, backend, strategy): pyhf.set_backend( pyhf.tensorlib, pyhf.optimize.minuit_optimizer(strategy=strategy, tolerance=0.2) ) - spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize') + spy = mocker.spy(pyhf.optimize.minuit_optimizer, "_minimize") model = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata) @@ -203,9 +203,9 @@ def test_set_tolerance(backend): @pytest.mark.parametrize( - 'optimizer', + "optimizer", [pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer], - ids=['scipy', 'minuit'], + ids=["scipy", "minuit"], ) def test_optimizer_unsupported_minimizer_options(optimizer): pyhf.set_backend(pyhf.default_backend, optimizer()) @@ -217,12 +217,12 @@ def test_optimizer_unsupported_minimizer_options(optimizer): pyhf.infer.mle.fit(data, m, unsupported_minimizer_options=False) -@pytest.mark.parametrize('return_result_obj', [False, True], ids=['no_obj', 'obj']) -@pytest.mark.parametrize('return_fitted_val', [False, True], ids=['no_fval', 'fval']) +@pytest.mark.parametrize("return_result_obj", [False, True], ids=["no_obj", "obj"]) +@pytest.mark.parametrize("return_fitted_val", [False, True], ids=["no_fval", "fval"]) @pytest.mark.parametrize( - 'optimizer', + "optimizer", [pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer], - ids=['scipy', 'minuit'], + ids=["scipy", "minuit"], ) def test_optimizer_return_values(optimizer, return_fitted_val, return_result_obj): pyhf.set_backend(pyhf.default_backend, optimizer()) @@ -246,45 +246,45 @@ def test_optimizer_return_values(optimizer, return_fitted_val, return_result_obj assert isinstance(result[-1], OptimizeResult) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def source(): source = { - 'binning': [2, -0.5, 1.5], - 'bindata': { - 'data': [120.0, 180.0], - 'bkg': [100.0, 150.0], - 'bkgsys_up': [102, 190], - 'bkgsys_dn': [98, 100], - 'sig': [30.0, 95.0], + "binning": [2, -0.5, 1.5], + "bindata": { + "data": [120.0, 180.0], + "bkg": [100.0, 150.0], + "bkgsys_up": [102, 190], + 
"bkgsys_dn": [98, 100], + "sig": [30.0, 95.0], }, } return source -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def spec(source): spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'bkg_norm', - 'type': 'histosys', - 'data': { - 'lo_data': source['bindata']['bkgsys_dn'], - 'hi_data': source['bindata']['bkgsys_up'], + "name": "bkg_norm", + "type": "histosys", + "data": { + "lo_data": source["bindata"]["bkgsys_dn"], + "hi_data": source["bindata"]["bkgsys_up"], }, } ], @@ -296,10 +296,10 @@ def spec(source): return spec -@pytest.mark.parametrize('mu', [1.0], ids=['mu=1']) +@pytest.mark.parametrize("mu", [1.0], ids=["mu=1"]) def test_optim(backend, source, spec, mu): pdf = pyhf.Model(spec, poi_name="mu") - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata init_pars = pdf.config.suggested_init() par_bounds = pdf.config.suggested_bounds() @@ -320,10 +320,10 @@ def test_optim(backend, source, spec, mu): assert pyhf.tensorlib.tolist(result) -@pytest.mark.parametrize('mu', [1.0], ids=['mu=1']) +@pytest.mark.parametrize("mu", [1.0], ids=["mu=1"]) def test_optim_with_value(backend, source, spec, mu): pdf = pyhf.Model(spec, poi_name="mu") - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata init_pars = pdf.config.suggested_init() par_bounds = pdf.config.suggested_bounds() @@ -347,11 +347,11 @@ def test_optim_with_value(backend, source, spec, mu): assert pytest.approx(17.52954975, rel=1e-5) == pyhf.tensorlib.tolist(fitted_val) -@pytest.mark.parametrize('mu', [1.0], ids=['mu=1']) +@pytest.mark.parametrize("mu", [1.0], ids=["mu=1"]) @pytest.mark.only_numpy_minuit def test_optim_uncerts(backend, source, spec, mu): pdf = pyhf.Model(spec, poi_name="mu") - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata init_pars = pdf.config.suggested_init() par_bounds = pdf.config.suggested_bounds() @@ -376,11 +376,11 @@ def test_optim_uncerts(backend, source, spec, mu): ) -@pytest.mark.parametrize('mu', [1.0], ids=['mu=1']) +@pytest.mark.parametrize("mu", [1.0], ids=["mu=1"]) @pytest.mark.only_numpy_minuit def test_optim_correlations(backend, source, spec, mu): pdf = pyhf.Model(spec, poi_name="mu") - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata init_pars = pdf.config.suggested_init() par_bounds = pdf.config.suggested_bounds() @@ -408,10 +408,10 @@ def test_optim_correlations(backend, source, spec, mu): @pytest.mark.parametrize( - 'has_reached_call_limit', [False, True], ids=['no_call_limit', 'call_limit'] + "has_reached_call_limit", [False, True], ids=["no_call_limit", "call_limit"] ) @pytest.mark.parametrize( - 'is_above_max_edm', [False, True], ids=['below_max_edm', 'above_max_edm'] + "is_above_max_edm", [False, True], ids=["below_max_edm", "above_max_edm"] ) def test_minuit_failed_optimization( monkeypatch, mocker, has_reached_call_limit, is_above_max_edm @@ 
-428,11 +428,11 @@ def fmin(self): mock.is_above_max_edm = is_above_max_edm return mock - monkeypatch.setattr(iminuit, 'Minuit', BadMinuit) - pyhf.set_backend('numpy', 'minuit') + monkeypatch.setattr(iminuit, "Minuit", BadMinuit) + pyhf.set_backend("numpy", "minuit") pdf = pyhf.simplemodels.uncorrelated_background([5], [10], [3.5]) data = [10] + pdf.config.auxdata - spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize') + spy = mocker.spy(pyhf.optimize.minuit_optimizer, "_minimize") with pytest.raises( pyhf.exceptions.FailedMinimization, match="Optimization failed" ) as exc_info: @@ -440,29 +440,29 @@ def fmin(self): assert isinstance(exc_info.value.result, OptimizeResult) - assert 'Optimization failed' in spy.spy_return.message + assert "Optimization failed" in spy.spy_return.message if has_reached_call_limit: - assert exc_info.match('Call limit was reached') - assert 'Call limit was reached' in spy.spy_return.message + assert exc_info.match("Call limit was reached") + assert "Call limit was reached" in spy.spy_return.message if is_above_max_edm: - assert exc_info.match('Estimated distance to minimum too large') - assert 'Estimated distance to minimum too large' in spy.spy_return.message + assert exc_info.match("Estimated distance to minimum too large") + assert "Estimated distance to minimum too large" in spy.spy_return.message def test_minuit_set_options(mocker): - pyhf.set_backend('numpy', 'minuit') + pyhf.set_backend("numpy", "minuit") pdf = pyhf.simplemodels.uncorrelated_background([5], [10], [3.5]) data = [10] + pdf.config.auxdata # no need to postprocess in this test - mocker.patch.object(OptimizerMixin, '_internal_postprocess') - spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize') + mocker.patch.object(OptimizerMixin, "_internal_postprocess") + spy = mocker.spy(pyhf.optimize.minuit_optimizer, "_minimize") pyhf.infer.mle.fit(data, pdf, tolerance=0.5, strategy=0) assert spy.spy_return.minuit.tol == 0.5 assert spy.spy_return.minuit.strategy == 0 def test_get_tensor_shim(monkeypatch): - monkeypatch.setattr(pyhf.tensorlib, 'name', 'fake_backend') + monkeypatch.setattr(pyhf.tensorlib, "name", "fake_backend") with pytest.raises(ValueError, match="No optimizer shim for fake_backend."): _get_tensor_shim() @@ -471,7 +471,7 @@ def test_stitch_pars(backend): tb, _ = backend passthrough = _make_stitch_pars() - pars = ['a', 'b', 1.0, 2.0, object()] + pars = ["a", "b", 1.0, 2.0, object()] assert passthrough(pars) == pars fixed_idx = [0, 3, 4] @@ -507,7 +507,7 @@ def test_init_pars_sync_fixed_values_minuit(mocker): # patch all we need from pyhf.optimize import opt_minuit - minuit = mocker.patch.object(getattr(opt_minuit, 'iminuit'), 'Minuit') + minuit = mocker.patch.object(getattr(opt_minuit, "iminuit"), "Minuit") minimizer = opt._get_minimizer(None, [9, 9, 9], [(0, 10)] * 3, fixed_vals=[(0, 1)]) assert minuit.called assert minuit.call_args.args[1] == [1, 9, 9] @@ -516,28 +516,28 @@ def test_init_pars_sync_fixed_values_minuit(mocker): def test_solver_options_behavior_scipy(mocker): - opt = pyhf.optimize.scipy_optimizer(solver_options={'arbitrary_option': 'foobar'}) + opt = pyhf.optimize.scipy_optimizer(solver_options={"arbitrary_option": "foobar"}) minimizer = mocker.MagicMock() opt._minimize(minimizer, None, [9, 9, 9], fixed_vals=[(0, 1)]) - assert 'arbitrary_option' in minimizer.call_args[1]['options'] - assert minimizer.call_args[1]['options']['arbitrary_option'] == 'foobar' + assert "arbitrary_option" in minimizer.call_args[1]["options"] + assert 
minimizer.call_args[1]["options"]["arbitrary_option"] == "foobar" opt._minimize( minimizer, None, [9, 9, 9], fixed_vals=[(0, 1)], - options={'solver_options': {'arbitrary_option': 'baz'}}, + options={"solver_options": {"arbitrary_option": "baz"}}, ) - assert 'arbitrary_option' in minimizer.call_args[1]['options'] - assert minimizer.call_args[1]['options']['arbitrary_option'] == 'baz' + assert "arbitrary_option" in minimizer.call_args[1]["options"] + assert minimizer.call_args[1]["options"]["arbitrary_option"] == "baz" def test_solver_options_scipy(mocker): - optimizer = pyhf.optimize.scipy_optimizer(solver_options={'ftol': 1e-5}) - pyhf.set_backend('numpy', optimizer) - assert pyhf.optimizer.solver_options == {'ftol': 1e-5} + optimizer = pyhf.optimize.scipy_optimizer(solver_options={"ftol": 1e-5}) + pyhf.set_backend("numpy", optimizer) + assert pyhf.optimizer.solver_options == {"ftol": 1e-5} model = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata) @@ -549,10 +549,10 @@ def test_solver_options_scipy(mocker): # It does raise a scipy.optimize.OptimizeWarning though. def test_bad_solver_options_scipy(mocker): optimizer = pyhf.optimize.scipy_optimizer( - solver_options={'arbitrary_option': 'foobar'} + solver_options={"arbitrary_option": "foobar"} ) - pyhf.set_backend('numpy', optimizer) - assert pyhf.optimizer.solver_options == {'arbitrary_option': 'foobar'} + pyhf.set_backend("numpy", optimizer) + assert pyhf.optimizer.solver_options == {"arbitrary_option": "foobar"} model = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10.0]) data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata) @@ -564,12 +564,12 @@ def test_bad_solver_options_scipy(mocker): def test_minuit_param_names(mocker): - pyhf.set_backend('numpy', 'minuit') + pyhf.set_backend("numpy", "minuit") pdf = pyhf.simplemodels.uncorrelated_background([5], [10], [3.5]) data = [10] + pdf.config.auxdata _, result = pyhf.infer.mle.fit(data, pdf, return_result_obj=True) - assert 'minuit' in result - assert result.minuit.parameters == ('mu', 'uncorr_bkguncrt[0]') + assert "minuit" in result + assert result.minuit.parameters == ("mu", "uncorr_bkguncrt[0]") with patch( "pyhf.pdf._ModelConfig.par_names", new_callable=PropertyMock diff --git a/tests/test_paramsets.py b/tests/test_paramsets.py index 6f450de690..62fa3bb937 100644 --- a/tests/test_paramsets.py +++ b/tests/test_paramsets.py @@ -5,7 +5,7 @@ def test_paramset_unconstrained(): pset = paramsets.unconstrained( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -20,7 +20,7 @@ def test_paramset_unconstrained(): def test_paramset_constrained_custom_sigmas(): pset = paramsets.constrained_by_normal( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -38,7 +38,7 @@ def test_paramset_constrained_custom_sigmas(): def test_paramset_constrained_default_sigmas(): pset = paramsets.constrained_by_normal( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -55,7 +55,7 @@ def test_paramset_constrained_default_sigmas(): def test_paramset_constrained_custom_factors(): pset = paramsets.constrained_by_poisson( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -73,7 +73,7 @@ def test_paramset_constrained_custom_factors(): def test_paramset_constrained_missiing_factors(): pset = paramsets.constrained_by_poisson( - name='foo', + name="foo", is_scalar=False, n_parameters=5, 
inits=[0, 1, 2, 3, 4], @@ -106,7 +106,7 @@ def test_vector_fixed_set(): def test_bool_compression2(): pset = paramsets.constrained_by_poisson( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -122,7 +122,7 @@ def test_bool_compression2(): def test_bool_compression(): pset = paramsets.constrained_by_poisson( - name='foo', + name="foo", is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], @@ -142,7 +142,7 @@ def test_bool_compression(): def test_scalar_multiparam_failure(): with pytest.raises(ValueError): paramsets.paramset( - name='foo', + name="foo", is_scalar=True, n_parameters=5, inits=[0, 1, 2, 3, 4], diff --git a/tests/test_paramviewer.py b/tests/test_paramviewer.py index 5d85126d4a..4b0d8d3796 100644 --- a/tests/test_paramviewer.py +++ b/tests/test_paramviewer.py @@ -11,8 +11,8 @@ def test_paramviewer_simple_nonbatched(backend): view = ParamViewer( parshape, - {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(5, 7)}}, - ['world', 'hello'], + {"hello": {"slice": slice(0, 2)}, "world": {"slice": slice(5, 7)}}, + ["world", "hello"], ) par_slice = view.get(pars) assert pyhf.tensorlib.tolist(par_slice[slice(2, 4)]) == [1, 2] @@ -42,8 +42,8 @@ def test_paramviewer_simple_batched(backend): view = ParamViewer( parshape, - {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(3, 4)}}, - ['world', 'hello'], + {"hello": {"slice": slice(0, 2)}, "world": {"slice": slice(3, 4)}}, + ["world", "hello"], ) par_slice = view.get(pars) diff --git a/tests/test_patchset.py b/tests/test_patchset.py index 7adaf1d2b1..acbec84e32 100644 --- a/tests/test_patchset.py +++ b/tests/test_patchset.py @@ -9,9 +9,9 @@ @pytest.fixture( - scope='function', - params=['patchset_good.json', 'patchset_good_2_patches.json'], - ids=['patchset_good.json', 'patchset_good_2_patches.json'], + scope="function", + params=["patchset_good.json", "patchset_good_2_patches.json"], + ids=["patchset_good.json", "patchset_good_2_patches.json"], ) def patchset(datadir, request): with open(datadir.joinpath(request.param), encoding="utf-8") as spec_file: @@ -19,19 +19,19 @@ def patchset(datadir, request): return pyhf.PatchSet(spec) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def patch(): return pyhf.patchset.Patch( - {'metadata': {'name': 'test', 'values': [1.0, 2.0, 3.0]}, 'patch': {}} + {"metadata": {"name": "test", "values": [1.0, 2.0, 3.0]}, "patch": {}} ) @pytest.mark.parametrize( - 'patchset_file', + "patchset_file", [ - 'patchset_bad_empty_patches.json', - 'patchset_bad_no_version.json', - 'patchset_bad_wrong_valuetype.json', + "patchset_bad_empty_patches.json", + "patchset_bad_no_version.json", + "patchset_bad_wrong_valuetype.json", ], ) def test_patchset_invalid_spec(datadir, patchset_file): @@ -42,11 +42,11 @@ def test_patchset_invalid_spec(datadir, patchset_file): @pytest.mark.parametrize( - 'patchset_file', + "patchset_file", [ - 'patchset_bad_duplicate_patch_name.json', - 'patchset_bad_duplicate_patch_values.json', - 'patchset_bad_wrong_values_multiplicity.json', + "patchset_bad_duplicate_patch_name.json", + "patchset_bad_duplicate_patch_values.json", + "patchset_bad_wrong_values_multiplicity.json", ], ) def test_patchset_bad(datadir, patchset_file): @@ -57,16 +57,16 @@ def test_patchset_bad(datadir, patchset_file): def test_patchset_attributes(patchset): - assert 'hepdata' in patchset.references + assert "hepdata" in patchset.references assert patchset.description == "signal patchset for the SUSY Multi-b-jet analysis" assert len(patchset.digests) == 
1 - assert patchset.digests['md5'] == "098f6bcd4621d373cade4e832627b4f6" + assert patchset.digests["md5"] == "098f6bcd4621d373cade4e832627b4f6" assert patchset.labels == ["mass_stop", "mass_neutralino"] assert patchset.version == "1.0.0" def test_patchset_get_patch_by_name(patchset): - assert patchset['Gtt_2100_5000_800'] + assert patchset["Gtt_2100_5000_800"] def test_patchset_get_patch_by_values(patchset): @@ -79,9 +79,9 @@ def test_patchset_get_nonexisting_patch(patchset): # Using asserts with str(exc_info.value) over pytest.raises(..., match="...") # to make it easier to check multiple strings. with pytest.raises(pyhf.exceptions.InvalidPatchLookup) as exc_info: - patchset.__getitem__('nonexisting_patch') - assert 'No patch associated with' in str(exc_info.value) - assert 'nonexisting_patch' in str(exc_info.value) + patchset.__getitem__("nonexisting_patch") + assert "No patch associated with" in str(exc_info.value) + assert "nonexisting_patch" in str(exc_info.value) def test_patchset_iterable(patchset): @@ -98,9 +98,9 @@ def test_patchset_len(patchset): def test_patchset_repr(patchset): assert repr(patchset) if len(patchset) == 1: - assert 'PatchSet object with 1 patch at' in repr(patchset) + assert "PatchSet object with 1 patch at" in repr(patchset) else: - assert f'PatchSet object with {len(patchset)} patches at' in repr(patchset) + assert f"PatchSet object with {len(patchset)} patches at" in repr(patchset) def test_patchset_verify(datadir): @@ -129,14 +129,14 @@ def test_patchset_apply(datadir): patchset = pyhf.PatchSet(json.load(patch_file)) with open(datadir.joinpath("example_bkgonly.json"), encoding="utf-8") as ws_file: ws = pyhf.Workspace(json.load(ws_file)) - with mock.patch('pyhf.patchset.PatchSet.verify') as m: + with mock.patch("pyhf.patchset.PatchSet.verify") as m: assert m.call_count == 0 - assert patchset.apply(ws, 'patch_channel1_signal_syst1') + assert patchset.apply(ws, "patch_channel1_signal_syst1") assert m.call_count == 1 def test_patch_hashable(patch): - assert patch.name == 'test' + assert patch.name == "test" assert isinstance(patch.values, tuple) assert patch.values == (1.0, 2.0, 3.0) @@ -153,7 +153,7 @@ def test_patch_equality(patch): def test_patchset_get_string_values(datadir): with open( - datadir.joinpath('patchset_good_stringvalues.json'), encoding="utf-8" + datadir.joinpath("patchset_good_stringvalues.json"), encoding="utf-8" ) as patch_file: patchset = pyhf.PatchSet(json.load(patch_file)) assert patchset["Gtt_2100_5000_800"] diff --git a/tests/test_pdf.py b/tests/test_pdf.py index 805ec05600..d62ada3d03 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -9,15 +9,15 @@ def test_minimum_model_spec(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'goodsample', - 'data': [1.0], - 'modifiers': [ - {'type': 'normfactor', 'name': 'mu', 'data': None} + "name": "goodsample", + "data": [1.0], + "modifiers": [ + {"type": "normfactor", "name": "mu", "data": None} ], }, ], @@ -33,11 +33,11 @@ def test_pdf_inputs(backend): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr'] + source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"] ) pars = pdf.config.suggested_init() - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata tensorlib, _ = backend 
assert tensorlib.shape(tensorlib.astensor(data)) == (2,) @@ -56,11 +56,11 @@ def test_invalid_pdf_pars(): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr'] + source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"] ) pars = pdf.config.suggested_init() + [1.0] - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata with pytest.raises(pyhf.exceptions.InvalidPdfParameters): pdf.logpdf(pars, data) @@ -72,17 +72,17 @@ def test_invalid_pdf_data(): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr'] + source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"] ) pars = pdf.config.suggested_init() - data = source['bindata']['data'] + [10.0] + pdf.config.auxdata + data = source["bindata"]["data"] + [10.0] + pdf.config.auxdata with pytest.raises(pyhf.exceptions.InvalidPdfData): pdf.logpdf(pars, data) -@pytest.mark.parametrize('batch_size', [None, 2]) +@pytest.mark.parametrize("batch_size", [None, 2]) def test_pdf_expected_data_by_sample(backend, batch_size): tb, _ = backend source = { @@ -90,9 +90,9 @@ def test_pdf_expected_data_by_sample(backend, batch_size): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], - source['bindata']['bkg'], - source['bindata']['bkgerr'], + source["bindata"]["sig"], + source["bindata"]["bkg"], + source["bindata"]["bkgerr"], batch_size=batch_size, ) @@ -108,11 +108,11 @@ def test_pdf_expected_data_by_sample(backend, batch_size): data = pdf.main_model.expected_data(init_pars, return_by_sample=True) if batch_size: - data = tb.tolist(tb.einsum('ij...->ji...', data)) + data = tb.tolist(tb.einsum("ij...->ji...", data)) sample_expected_data = dict(zip(pdf.config.samples, tb.tolist(data))) - assert sample_expected_data['background'] == tb.tolist(expected_bkg) - assert sample_expected_data['signal'] == tb.tolist(expected_sig) + assert sample_expected_data["background"] == tb.tolist(expected_bkg) + assert sample_expected_data["signal"] == tb.tolist(expected_sig) def test_pdf_basicapi_tests(backend): @@ -121,11 +121,11 @@ def test_pdf_basicapi_tests(backend): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr'] + source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"] ) pars = pdf.config.suggested_init() - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata tensorlib, _ = backend assert tensorlib.tolist(pdf.pdf(pars, data)) == pytest.approx( @@ -146,14 +146,14 @@ def test_pdf_basicapi_tests(backend): ) pdf = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], - source['bindata']['bkg'], - source['bindata']['bkgerr'], + source["bindata"]["sig"], + source["bindata"]["bkg"], + source["bindata"]["bkgerr"], batch_size=2, ) pars = [pdf.config.suggested_init()] * 2 - data = source['bindata']['data'] + pdf.config.auxdata + data = source["bindata"]["data"] + pdf.config.auxdata tensorlib, _ = backend assert 
tensorlib.tolist(pdf.pdf(pars, data)) == pytest.approx( @@ -197,46 +197,46 @@ def test_core_pdf_broadcasting(backend): def test_pdf_integration_staterror(backend): spec = { - 'channels': [ + "channels": [ { - 'name': 'firstchannel', - 'samples': [ + "name": "firstchannel", + "samples": [ { - 'name': 'mu', - 'data': [10.0, 10.0], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "mu", + "data": [10.0, 10.0], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'bkg1', - 'data': [50.0, 70.0], - 'modifiers': [ + "name": "bkg1", + "data": [50.0, 70.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [12.0, 12.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [12.0, 12.0], } ], }, { - 'name': 'bkg2', - 'data': [30.0, 20.0], - 'modifiers': [ + "name": "bkg2", + "data": [30.0, 20.0], + "modifiers": [ { - 'name': 'stat_firstchannel', - 'type': 'staterror', - 'data': [5.0, 5.0], + "name": "stat_firstchannel", + "type": "staterror", + "data": [5.0, 5.0], } ], }, - {'name': 'bkg3', 'data': [20.0, 15.0], 'modifiers': []}, + {"name": "bkg3", "data": [20.0, 15.0], "modifiers": []}, ], } ] } pdf = pyhf.Model(spec) - par_set = pdf.config.param_set('stat_firstchannel') + par_set = pdf.config.param_set("stat_firstchannel") tensorlib, _ = backend uncerts = tensorlib.astensor([[12.0, 12.0], [5.0, 5.0]]) nominal = tensorlib.astensor([[50.0, 70.0], [30.0, 20.0]]) @@ -249,18 +249,18 @@ def test_pdf_integration_staterror(backend): def test_poiless_model(backend): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'goodsample', - 'data': [10.0], - 'modifiers': [ + "name": "goodsample", + "data": [10.0], + "modifiers": [ { - 'type': 'normsys', - 'name': 'shape', - 'data': {"hi": 0.5, "lo": 1.5}, + "type": "normsys", + "name": "shape", + "data": {"hi": 0.5, "lo": 1.5}, } ], }, @@ -282,18 +282,18 @@ def test_poiless_model(backend): def test_poiless_model_empty_string(backend): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'goodsample', - 'data': [10.0], - 'modifiers': [ + "name": "goodsample", + "data": [10.0], + "modifiers": [ { - 'type': 'normsys', - 'name': 'shape', - 'data': {"hi": 0.5, "lo": 1.5}, + "type": "normsys", + "name": "shape", + "data": {"hi": 0.5, "lo": 1.5}, } ], }, @@ -342,7 +342,7 @@ def test_pdf_integration_shapesys_zeros(backend): ] } pdf = pyhf.Model(spec) - par_set_syst = pdf.config.param_set('syst') + par_set_syst = pdf.config.param_set("syst") assert par_set_syst.n_parameters == 6 tensorlib, _ = backend @@ -361,27 +361,27 @@ def test_pdf_integration_histosys(backend): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'bkg_norm', - 'type': 'histosys', - 'data': { - 'lo_data': source['bindata']['bkgsys_dn'], - 'hi_data': source['bindata']['bkgsys_up'], + "name": "bkg_norm", + "type": 
"histosys", + "data": { + "lo_data": source["bindata"]["bkgsys_dn"], + "hi_data": source["bindata"]["bkgsys_up"], }, } ], @@ -394,31 +394,31 @@ def test_pdf_integration_histosys(backend): pars = [None, None] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [102, 190] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [2.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [104, 230] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [-1.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [98, 100] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [-2.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [96, 50] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [1.0], [1.0], ] @@ -427,7 +427,7 @@ def test_pdf_integration_histosys(backend): 190 + 95, ] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [1.0], [-1.0], ] @@ -443,25 +443,25 @@ def test_pdf_integration_normsys(backend): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ { - 'name': 'bkg_norm', - 'type': 'normsys', - 'data': {'lo': 0.9, 'hi': 1.1}, + "name": "bkg_norm", + "type": "normsys", + "data": {"lo": 0.9, "hi": 1.1}, } ], }, @@ -472,7 +472,7 @@ def test_pdf_integration_normsys(backend): pdf = pyhf.Model(spec) pars = [None, None] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [0.0], ] @@ -481,7 +481,7 @@ def test_pdf_integration_normsys(backend): [100, 150], ) - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.0], ] @@ -490,7 +490,7 @@ def test_pdf_integration_normsys(backend): [100 * 1.1, 150 * 1.1], ) - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [-1.0], ] @@ -507,22 +507,22 @@ def test_pdf_integration_shapesys(backend): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 
'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapesys', 'data': [10, 10]} + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ + {"name": "bkg_norm", "type": "shapesys", "data": [10, 10]} ], }, ], @@ -533,25 +533,25 @@ def test_pdf_integration_shapesys(backend): pars = [None, None] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.0, 1.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100, 150] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.1, 1.0], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100 * 1.1, 150] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.0, 1.1], ] assert pdf.expected_data(pars, include_auxdata=False).tolist() == [100, 150 * 1.1] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [1.1, 0.9], ] @@ -560,7 +560,7 @@ def test_pdf_integration_shapesys(backend): 150 * 0.9, ] - pars[pdf.config.par_slice('mu')], pars[pdf.config.par_slice('bkg_norm')] = [ + pars[pdf.config.par_slice("mu")], pars[pdf.config.par_slice("bkg_norm")] = [ [0.0], [0.9, 1.1], ] @@ -572,18 +572,18 @@ def test_pdf_integration_shapesys(backend): def test_invalid_modifier(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'ttbar', - 'data': [1], - 'modifiers': [ + "name": "ttbar", + "data": [1], + "modifiers": [ { - 'name': 'a_name', - 'type': 'this_should_not_exist', - 'data': [1], + "name": "a_name", + "type": "this_should_not_exist", + "data": [1], } ], } @@ -597,25 +597,25 @@ def test_invalid_modifier(): def test_invalid_modifier_name_resuse(): spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': [5.0], - 'modifiers': [ - {'name': 'reused_name', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": [5.0], + "modifiers": [ + {"name": "reused_name", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': [50.0], - 'modifiers': [ + "name": "background", + "data": [50.0], + "modifiers": [ { - 'name': 'reused_name', - 'type': 'normsys', - 'data': {'lo': 0.9, 'hi': 1.1}, + "name": "reused_name", + "type": "normsys", + "data": {"lo": 0.9, "hi": 1.1}, } ], }, @@ -624,7 +624,7 @@ def test_invalid_modifier_name_resuse(): ] } with pytest.raises(pyhf.exceptions.InvalidNameReuse): - pyhf.Model(spec, poi_name='reused_name') + pyhf.Model(spec, poi_name="reused_name") def test_override_paramset_defaults(): @@ -633,34 +633,34 @@ def test_override_paramset_defaults(): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - 
{'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapesys', 'data': [10, 10]} + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ + {"name": "bkg_norm", "type": "shapesys", "data": [10, 10]} ], }, ], } ], - 'parameters': [ - {'name': 'bkg_norm', 'inits': [99, 99], 'bounds': [[95, 95], [95, 95]]} + "parameters": [ + {"name": "bkg_norm", "inits": [99, 99], "bounds": [[95, 95], [95, 95]]} ], } pdf = pyhf.Model(spec) - assert pdf.config.param_set('bkg_norm').suggested_bounds == [[95, 95], [95, 95]] - assert pdf.config.param_set('bkg_norm').suggested_init == [99, 99] + assert pdf.config.param_set("bkg_norm").suggested_bounds == [[95, 95], [95, 95]] + assert pdf.config.param_set("bkg_norm").suggested_init == [99, 99] def test_override_paramsets_incorrect_num_parameters(): @@ -669,28 +669,28 @@ def test_override_paramsets_incorrect_num_parameters(): ) as spec_file: source = json.load(spec_file) spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': source['bindata']['sig'], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": source["bindata"]["sig"], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': source['bindata']['bkg'], - 'modifiers': [ - {'name': 'bkg_norm', 'type': 'shapesys', 'data': [10, 10]} + "name": "background", + "data": source["bindata"]["bkg"], + "modifiers": [ + {"name": "bkg_norm", "type": "shapesys", "data": [10, 10]} ], }, ], } ], - 'parameters': [{'name': 'bkg_norm', 'inits': [99, 99], 'bounds': [[95, 95]]}], + "parameters": [{"name": "bkg_norm", "inits": [99, 99], "bounds": [[95, 95]]}], } with pytest.raises(pyhf.exceptions.InvalidModel): pyhf.Model(spec) @@ -735,12 +735,12 @@ def test_lumi_np_scaling(): } pdf = pyhf.pdf.Model(spec, poi_name="SigXsecOverSM") - poi_slice = pdf.config.par_slice('SigXsecOverSM') - lumi_slice = pdf.config.par_slice('lumi') + poi_slice = pdf.config.par_slice("SigXsecOverSM") + lumi_slice = pdf.config.par_slice("lumi") - index_bkg1 = pdf.config.samples.index('background1') - index_bkg2 = pdf.config.samples.index('background2') - index_sig = pdf.config.samples.index('signal') + index_bkg1 = pdf.config.samples.index("background1") + index_bkg2 = pdf.config.samples.index("background2") + index_sig = pdf.config.samples.index("signal") bkg1_slice = slice(index_bkg1, index_bkg1 + 1) bkg2_slice = slice(index_bkg2, index_bkg2 + 1) sig_slice = slice(index_sig, index_sig + 1) @@ -775,12 +775,12 @@ def test_lumi_np_scaling(): def test_sample_wrong_bins(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'goodsample', 'data': [1.0, 2.0], 'modifiers': []}, - {'name': 'badsample', 'data': [3.0, 4.0, 5.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "goodsample", "data": [1.0, 2.0], "modifiers": []}, + {"name": "badsample", "data": [3.0, 4.0, 5.0], "modifiers": []}, ], } ] @@ -790,11 +790,11 @@ def test_sample_wrong_bins(): @pytest.mark.parametrize( - 'measurements, msettings', + "measurements, msettings", [ ( None, - {'normsys': {'interpcode': 'code4'}, 'histosys': {'interpcode': 'code4p'}}, + {"normsys": {"interpcode": 
"code4"}, "histosys": {"interpcode": "code4p"}}, ) ], ) @@ -834,61 +834,61 @@ def test_unexpected_keyword_argument(measurements, msettings): def test_model_integration_fixed_parameters(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ - {'name': 'unfixed', 'type': 'normfactor', 'data': None} + "name": "sample", + "data": [10.0], + "modifiers": [ + {"name": "unfixed", "type": "normfactor", "data": None} ], }, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'mypoi', 'inits': [1], 'fixed': True}], + "parameters": [{"name": "mypoi", "inits": [1], "fixed": True}], } - model = pyhf.Model(spec, poi_name='mypoi') - assert model.config.suggested_fixed()[model.config.par_slice('mypoi')] == [True] + model = pyhf.Model(spec, poi_name="mypoi") + assert model.config.suggested_fixed()[model.config.par_slice("mypoi")] == [True] def test_model_integration_fixed_parameters_shapesys(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0] * 3, - 'modifiers': [ - {'name': 'unfixed', 'type': 'normfactor', 'data': None}, - {'name': 'uncorr', 'type': 'shapesys', 'data': [1.5] * 3}, + "name": "sample", + "data": [10.0] * 3, + "modifiers": [ + {"name": "unfixed", "type": "normfactor", "data": None}, + {"name": "uncorr", "type": "shapesys", "data": [1.5] * 3}, ], }, { - 'name': 'another_sample', - 'data': [5.0] * 3, - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0] * 3, + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'uncorr', 'inits': [1.0, 2.0, 3.0], 'fixed': True}], + "parameters": [{"name": "uncorr", "inits": [1.0, 2.0, 3.0], "fixed": True}], } - model = pyhf.Model(spec, poi_name='mypoi') - assert model.config.suggested_fixed()[model.config.par_slice('uncorr')] == [ + model = pyhf.Model(spec, poi_name="mypoi") + assert model.config.suggested_fixed()[model.config.par_slice("uncorr")] == [ True, True, True, @@ -928,8 +928,8 @@ def test_reproducible_model_spec(): workspace = pyhf.Workspace(ws) model_from_ws = workspace.model() - assert model_from_ws.spec['parameters'] == [ - {'bounds': [[0, 5]], 'inits': [1], 'name': 'mu'} + assert model_from_ws.spec["parameters"] == [ + {"bounds": [[0, 5]], "inits": [1], "name": "mu"} ] assert pyhf.Model(model_from_ws.spec) @@ -940,16 +940,16 @@ def test_par_names_scalar_nonscalar(): n_parameters==1. 
""" spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'goodsample', - 'data': [1.0], - 'modifiers': [ - {'type': 'normfactor', 'name': 'scalar', 'data': None}, - {'type': 'shapesys', 'name': 'nonscalar', 'data': [1.0]}, + "name": "goodsample", + "data": [1.0], + "modifiers": [ + {"type": "normfactor", "name": "scalar", "data": None}, + {"type": "shapesys", "name": "nonscalar", "data": [1.0]}, ], }, ], @@ -960,8 +960,8 @@ def test_par_names_scalar_nonscalar(): model = pyhf.Model(spec, poi_name="scalar") assert model.config.par_order == ["scalar", "nonscalar"] assert model.config.par_names == [ - 'scalar', - 'nonscalar[0]', + "scalar", + "nonscalar[0]", ] @@ -996,8 +996,8 @@ def make_model( "name": "corr_bkguncrt2", "type": "histosys", "data": { - 'hi_data': corrup_data, - 'lo_data': corrdn_data, + "hi_data": corrup_data, + "lo_data": corrdn_data, }, }, { @@ -1008,7 +1008,7 @@ def make_model( { "name": "norm", "type": "normsys", - "data": {'hi': normsys_up, 'lo': normsys_dn}, + "data": {"hi": normsys_up, "lo": normsys_dn}, }, ], } @@ -1037,8 +1037,8 @@ def make_model( "name": "corr_bkguncrt2", "type": "histosys", "data": { - 'hi_data': corrup_data, - 'lo_data': corrdn_data, + "hi_data": corrup_data, + "lo_data": corrdn_data, }, }, { @@ -1049,7 +1049,7 @@ def make_model( { "name": "norm", "type": "normsys", - "data": {'hi': normsys_up, 'lo': normsys_dn}, + "data": {"hi": normsys_up, "lo": normsys_dn}, }, ], } @@ -1059,13 +1059,13 @@ def make_model( } model = pyhf.Model( { - 'channels': spec['channels'], - 'parameters': [ + "channels": spec["channels"], + "parameters": [ { - 'name': 'lumi', - 'auxdata': [1.0], - 'bounds': [[0.5, 1.5]], - 'inits': [1.0], + "name": "lumi", + "auxdata": [1.0], + "bounds": [[0.5, 1.5]], + "inits": [1.0], "sigmas": [lumi_sigma], } ], @@ -1190,7 +1190,7 @@ def test_pdf_clipping(backend): ) # Minuit cannot handle negative yields, confirm that MLE fails for minuit specifically - if optimizer.name == 'minuit': + if optimizer.name == "minuit": with pytest.raises(pyhf.exceptions.FailedMinimization): pyhf.infer.mle.fit(data, model) else: diff --git a/tests/test_public_api.py b/tests/test_public_api.py index 5426ecddef..ab8c774d78 100644 --- a/tests/test_public_api.py +++ b/tests/test_public_api.py @@ -6,7 +6,7 @@ import pyhf -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def model_setup(backend): np.random.seed(0) n_bins = 100 @@ -101,7 +101,7 @@ def test_custom_backend_name_supported(): class custom_backend: def __init__(self, **kwargs): self.name = "jax" - self.precision = '64b' + self.precision = "64b" def _setup(self): pass @@ -123,7 +123,7 @@ def test_custom_backend_name_notsupported(): class custom_backend: def __init__(self, **kwargs): self.name = "notsupported" - self.precision = '64b' + self.precision = "64b" def _setup(self): pass @@ -192,14 +192,14 @@ def test_pdf_batched(backend): "bindata": {"data": [55.0], "bkg": [50.0], "bkgerr": [7.0], "sig": [10.0]}, } model = pyhf.simplemodels.uncorrelated_background( - source['bindata']['sig'], - source['bindata']['bkg'], - source['bindata']['bkgerr'], + source["bindata"]["sig"], + source["bindata"]["bkg"], + source["bindata"]["bkgerr"], batch_size=2, ) pars = [model.config.suggested_init()] * 2 - data = source['bindata']['data'] + model.config.auxdata + data = source["bindata"]["data"] + model.config.auxdata model.pdf(pars, data) model.expected_data(pars) @@ -207,20 +207,20 @@ def test_pdf_batched(backend): def 
test_set_schema_path(monkeypatch): monkeypatch.setattr( - pyhf.schema.variables, 'schemas', pyhf.schema.variables.schemas, raising=True + pyhf.schema.variables, "schemas", pyhf.schema.variables.schemas, raising=True ) - new_path = pathlib.Path('a/new/path') + new_path = pathlib.Path("a/new/path") pyhf.schema(new_path) assert pyhf.schema.path == new_path def test_set_schema_path_context(monkeypatch): monkeypatch.setattr( - pyhf.schema.variables, 'schemas', pyhf.schema.variables.schemas, raising=True + pyhf.schema.variables, "schemas", pyhf.schema.variables.schemas, raising=True ) - new_path = pathlib.Path('a/new/path') + new_path = pathlib.Path("a/new/path") with pyhf.schema(new_path): assert pyhf.schema.path == new_path @@ -228,10 +228,10 @@ def test_set_schema_path_context(monkeypatch): def test_pdf_set_poi(backend): model = pyhf.simplemodels.uncorrelated_background([5.0], [10.0], [2.5]) assert model.config.poi_index == 0 - assert model.config.poi_name == 'mu' - model.config.set_poi('uncorr_bkguncrt') + assert model.config.poi_name == "mu" + model.config.set_poi("uncorr_bkguncrt") assert model.config.poi_index == 1 - assert model.config.poi_name == 'uncorr_bkguncrt' + assert model.config.poi_name == "uncorr_bkguncrt" model.config.set_poi(None) assert model.config.poi_index is None assert model.config.poi_name is None diff --git a/tests/test_regression.py b/tests/test_regression.py index a6396a0b11..4377e9dc58 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -25,8 +25,8 @@ def calculate_CLs(bkgonly_json, signal_patch_json): measurement_name=None, patches=[signal_patch_json], modifier_settings={ - 'normsys': {'interpcode': 'code4'}, - 'histosys': {'interpcode': 'code4p'}, + "normsys": {"interpcode": "code4"}, + "histosys": {"interpcode": "code4p"}, }, ) result = pyhf.infer.hypotest( diff --git a/tests/test_schema.py b/tests/test_schema.py index 384fcf0276..ea7ec670f0 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -8,22 +8,22 @@ import pyhf -@pytest.mark.parametrize('version', ['1.0.0']) +@pytest.mark.parametrize("version", ["1.0.0"]) @pytest.mark.parametrize( - 'schema', ['defs.json', 'measurement.json', 'model.json', 'workspace.json'] + "schema", ["defs.json", "measurement.json", "model.json", "workspace.json"] ) def test_get_schema(version, schema): - assert pyhf.schema.load_schema(f'{version}/{schema}') + assert pyhf.schema.load_schema(f"{version}/{schema}") def test_load_missing_schema(): with pytest.raises(IOError): - pyhf.schema.load_schema('fake_schema.json') + pyhf.schema.load_schema("fake_schema.json") def test_schema_attributes(): - assert hasattr(pyhf.schema, 'version') - assert hasattr(pyhf.schema, 'path') + assert hasattr(pyhf.schema, "version") + assert hasattr(pyhf.schema, "path") assert pyhf.schema.version assert pyhf.schema.path @@ -43,10 +43,10 @@ def self_restoring_schema_globals(): def test_schema_changeable(datadir, monkeypatch, self_restoring_schema_globals): monkeypatch.setattr( - pyhf.schema.variables, 'schemas', pyhf.schema.variables.schemas, raising=True + pyhf.schema.variables, "schemas", pyhf.schema.variables.schemas, raising=True ) old_path, old_cache = self_restoring_schema_globals - new_path = datadir / 'customschema' + new_path = datadir / "customschema" with pytest.raises(pyhf.exceptions.SchemaNotFound): with open( @@ -66,10 +66,10 @@ def test_schema_changeable(datadir, monkeypatch, self_restoring_schema_globals): def test_schema_changeable_context(datadir, monkeypatch, self_restoring_schema_globals): 
monkeypatch.setattr( - pyhf.schema.variables, 'schemas', pyhf.schema.variables.schemas, raising=True + pyhf.schema.variables, "schemas", pyhf.schema.variables.schemas, raising=True ) old_path, old_cache = self_restoring_schema_globals - new_path = datadir / 'customschema' + new_path = datadir / "customschema" assert old_path == pyhf.schema.path with pyhf.schema(new_path): @@ -88,10 +88,10 @@ def test_schema_changeable_context_error( datadir, monkeypatch, self_restoring_schema_globals ): monkeypatch.setattr( - pyhf.schema.variables, 'schemas', pyhf.schema.variables.schemas, raising=True + pyhf.schema.variables, "schemas", pyhf.schema.variables.schemas, raising=True ) old_path, old_cache = self_restoring_schema_globals - new_path = datadir / 'customschema' + new_path = datadir / "customschema" with pytest.raises(ZeroDivisionError): with pyhf.schema(new_path): @@ -104,23 +104,23 @@ def test_schema_changeable_context_error( def test_no_channels(): - spec = {'channels': []} + spec = {"channels": []} with pytest.raises(pyhf.exceptions.InvalidSpecification): pyhf.Model(spec) def test_no_samples(): - spec = {'channels': [{'name': 'channel', 'samples': []}]} + spec = {"channels": [{"name": "channel", "samples": []}]} with pytest.raises(pyhf.exceptions.InvalidSpecification): pyhf.Model(spec) def test_sample_missing_data(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [{'name': 'sample', 'data': [], 'modifiers': []}], + "name": "channel", + "samples": [{"name": "sample", "data": [], "modifiers": []}], } ] } @@ -130,7 +130,7 @@ def test_sample_missing_data(): def test_sample_missing_name(): spec = { - 'channels': [{'name': 'channel', 'samples': [{'data': [1], 'modifiers': []}]}] + "channels": [{"name": "channel", "samples": [{"data": [1], "modifiers": []}]}] } with pytest.raises(pyhf.exceptions.InvalidSpecification): pyhf.Model(spec) @@ -138,10 +138,10 @@ def test_sample_missing_name(): def test_sample_missing_all_modifiers(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [{'name': 'sample', 'data': [10.0], 'modifiers': []}], + "name": "channel", + "samples": [{"name": "sample", "data": [10.0], "modifiers": []}], } ] } @@ -151,39 +151,39 @@ def test_sample_missing_all_modifiers(): def test_one_sample_missing_modifiers(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ] } - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_add_unknown_modifier(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'ttbar', - 'data': [1], - 'modifiers': [ + "name": "ttbar", + "data": [1], + "modifiers": [ { - 'name': 'a_name', - 'type': 'this_should_not_exist', - 'data': [1], + "name": "a_name", + "type": "this_should_not_exist", + "data": [1], } ], } @@ -197,18 +197,18 @@ def test_add_unknown_modifier(): def test_empty_staterror(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ + "name": "sample", + "data": 
[10.0], + "modifiers": [ { - 'name': 'staterror_channel', - 'type': 'staterror', - 'data': [], + "name": "staterror_channel", + "type": "staterror", + "data": [], } ], } @@ -222,15 +222,15 @@ def test_empty_staterror(): def test_empty_shapesys(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ - {'name': 'sample_norm', 'type': 'shapesys', 'data': []} + "name": "sample", + "data": [10.0], + "modifiers": [ + {"name": "sample_norm", "type": "shapesys", "data": []} ], } ], @@ -243,18 +243,18 @@ def test_empty_shapesys(): def test_empty_histosys(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ + "name": "sample", + "data": [10.0], + "modifiers": [ { - 'name': 'modifier', - 'type': 'histosys', - 'data': {'lo_data': [], 'hi_data': []}, + "name": "modifier", + "type": "histosys", + "data": {"lo_data": [], "hi_data": []}, } ], } @@ -268,22 +268,22 @@ def test_empty_histosys(): def test_additional_properties(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'fake_additional_property': 2, + "fake_additional_property": 2, } with pytest.raises(pyhf.exceptions.InvalidSpecification): pyhf.Model(spec) @@ -291,209 +291,209 @@ def test_additional_properties(): def test_parameters_definition(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'mypoi'}], + "parameters": [{"name": "mypoi"}], } - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_parameters_incorrect_format(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': {'a': 'fake', 'object': 2}, + "parameters": {"a": "fake", "object": 2}, } with pytest.raises(pyhf.exceptions.InvalidSpecification): - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_parameters_duplicated(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 
'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'mypoi'}, {'name': 'mypoi'}], + "parameters": [{"name": "mypoi"}, {"name": "mypoi"}], } with pytest.raises(pyhf.exceptions.InvalidModel): - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_parameters_fixed(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ - {'name': 'unfixed', 'type': 'normfactor', 'data': None} + "name": "sample", + "data": [10.0], + "modifiers": [ + {"name": "unfixed", "type": "normfactor", "data": None} ], }, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'mypoi', 'inits': [1], 'fixed': True}], + "parameters": [{"name": "mypoi", "inits": [1], "fixed": True}], } - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_parameters_all_props(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [{'name': 'mypoi', 'inits': [1], 'bounds': [[0, 1]]}], + "parameters": [{"name": "mypoi", "inits": [1], "bounds": [[0, 1]]}], } - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") @pytest.mark.parametrize( - 'bad_parameter', + "bad_parameter", [ - {'name': 'mypoi', 'inits': ['a']}, - {'name': 'mypoi', 'bounds': [0, 1]}, - {'name': 'mypoi', 'auxdata': ['a']}, - {'name': 'mypoi', 'factors': ['a']}, - {'name': 'mypoi', 'paramset_type': 'fake_paramset_type'}, - {'name': 'mypoi', 'n_parameters': 5}, - {'name': 'mypoi', 'op_code': 'fake_op_code'}, + {"name": "mypoi", "inits": ["a"]}, + {"name": "mypoi", "bounds": [0, 1]}, + {"name": "mypoi", "auxdata": ["a"]}, + {"name": "mypoi", "factors": ["a"]}, + {"name": "mypoi", "paramset_type": "fake_paramset_type"}, + {"name": "mypoi", "n_parameters": 5}, + {"name": "mypoi", "op_code": "fake_op_code"}, ], ids=[ - 'inits', - 'bounds', - 'auxdata', - 'factors', - 'paramset_type', - 'n_parameters', - 'op_code', + "inits", + "bounds", + "auxdata", + "factors", + "paramset_type", + "n_parameters", + "op_code", ], ) def test_parameters_bad_parameter(bad_parameter): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [bad_parameter], + "parameters": [bad_parameter], } with pytest.raises(pyhf.exceptions.InvalidSpecification): - 
pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") @pytest.mark.parametrize( - 'bad_parameter', [{'name': 'mypoi', 'factors': [0.0]}], ids=['factors'] + "bad_parameter", [{"name": "mypoi", "factors": [0.0]}], ids=["factors"] ) def test_parameters_normfactor_bad_attribute(bad_parameter): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ - {'name': 'sample', 'data': [10.0], 'modifiers': []}, + "name": "channel", + "samples": [ + {"name": "sample", "data": [10.0], "modifiers": []}, { - 'name': 'another_sample', - 'data': [5.0], - 'modifiers': [ - {'name': 'mypoi', 'type': 'normfactor', 'data': None} + "name": "another_sample", + "data": [5.0], + "modifiers": [ + {"name": "mypoi", "type": "normfactor", "data": None} ], }, ], } ], - 'parameters': [bad_parameter], + "parameters": [bad_parameter], } with pytest.raises(pyhf.exceptions.InvalidModel): - pyhf.Model(spec, poi_name='mypoi') + pyhf.Model(spec, poi_name="mypoi") def test_histosys_additional_properties(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ + "name": "sample", + "data": [10.0], + "modifiers": [ { - 'name': 'histosys', - 'type': 'histosys', - 'data': { - 'hi_data': [1.0], - 'lo_data': [0.5], - 'foo': 2.0, + "name": "histosys", + "type": "histosys", + "data": { + "hi_data": [1.0], + "lo_data": [0.5], + "foo": 2.0, }, } ], @@ -508,18 +508,18 @@ def test_histosys_additional_properties(): def test_normsys_additional_properties(): spec = { - 'channels': [ + "channels": [ { - 'name': 'channel', - 'samples': [ + "name": "channel", + "samples": [ { - 'name': 'sample', - 'data': [10.0], - 'modifiers': [ + "name": "sample", + "data": [10.0], + "modifiers": [ { - 'name': 'normsys', - 'type': 'normsys', - 'data': {'hi': 1.0, 'lo': 0.5, 'foo': 2.0}, + "name": "normsys", + "type": "normsys", + "data": {"hi": 1.0, "lo": 0.5, "foo": 2.0}, } ], } @@ -532,7 +532,7 @@ def test_normsys_additional_properties(): @pytest.mark.parametrize( - 'patch', + "patch", [ {"op": "add", "path": "/foo/0/bar", "value": {"foo": [1.0]}}, {"op": "replace", "path": "/foo/0/bar", "value": {"foo": [1.0]}}, @@ -541,14 +541,14 @@ def test_normsys_additional_properties(): {"op": "move", "path": "/foo/0/bar", "from": "/foo/0/baz"}, {"op": "copy", "path": "/foo/0/bar", "from": "/foo/0/baz"}, ], - ids=['add', 'replace', 'test', 'remove', 'move', 'copy'], + ids=["add", "replace", "test", "remove", "move", "copy"], ) def test_jsonpatch(patch): - pyhf.schema.validate([patch], 'jsonpatch.json') + pyhf.schema.validate([patch], "jsonpatch.json") @pytest.mark.parametrize( - 'patch', + "patch", [ {"path": "/foo/0/bar"}, {"op": "add", "path": "/foo/0/bar", "from": {"foo": [1.0]}}, @@ -559,47 +559,47 @@ def test_jsonpatch(patch): {"op": "move", "from": "/foo/0/baz"}, ], ids=[ - 'noop', - 'add_from_novalue', - 'add_novalue', - 'add_nopath', - 'remove_nopath', - 'move_nofrom', - 'move_nopath', + "noop", + "add_from_novalue", + "add_novalue", + "add_nopath", + "remove_nopath", + "move_nofrom", + "move_nopath", ], ) def test_jsonpatch_fail(patch): with pytest.raises(pyhf.exceptions.InvalidSpecification): - pyhf.schema.validate([patch], 'jsonpatch.json') + pyhf.schema.validate([patch], "jsonpatch.json") -@pytest.mark.parametrize('patchset_file', ['patchset_good.json']) +@pytest.mark.parametrize("patchset_file", ["patchset_good.json"]) def test_patchset(datadir, patchset_file): with open(datadir.joinpath(patchset_file), 
encoding="utf-8") as patch_file: patchset = json.load(patch_file) - pyhf.schema.validate(patchset, 'patchset.json') + pyhf.schema.validate(patchset, "patchset.json") @pytest.mark.parametrize( - 'patchset_file', + "patchset_file", [ - 'patchset_bad_label_pattern.json', - 'patchset_bad_no_patch_name.json', - 'patchset_bad_empty_patches.json', - 'patchset_bad_no_patch_values.json', - 'patchset_bad_no_digests.json', - 'patchset_bad_no_description.json', - 'patchset_bad_no_labels.json', - 'patchset_bad_invalid_digests.json', - 'patchset_bad_hepdata_reference.json', - 'patchset_bad_no_version.json', + "patchset_bad_label_pattern.json", + "patchset_bad_no_patch_name.json", + "patchset_bad_empty_patches.json", + "patchset_bad_no_patch_values.json", + "patchset_bad_no_digests.json", + "patchset_bad_no_description.json", + "patchset_bad_no_labels.json", + "patchset_bad_invalid_digests.json", + "patchset_bad_hepdata_reference.json", + "patchset_bad_no_version.json", ], ) def test_patchset_fail(datadir, patchset_file): with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file: patchset = json.load(patch_file) with pytest.raises(pyhf.exceptions.InvalidSpecification): - pyhf.schema.validate(patchset, 'patchset.json') + pyhf.schema.validate(patchset, "patchset.json") def test_defs_always_cached( @@ -611,31 +611,31 @@ def test_defs_always_cached( Otherwise pyhf will crash in contexts where the jsonschema.RefResolver cannot lookup the definition by the schema-id (e.g. a cluster node without network access). """ - modules_to_clear = [name for name in sys.modules if name.split('.')[0] == 'pyhf'] + modules_to_clear = [name for name in sys.modules if name.split(".")[0] == "pyhf"] for module_name in modules_to_clear: del sys.modules[module_name] - pyhf = importlib.import_module('pyhf') + pyhf = importlib.import_module("pyhf") spec = { - 'channels': [ + "channels": [ { - 'name': 'singlechannel', - 'samples': [ + "name": "singlechannel", + "samples": [ { - 'name': 'signal', - 'data': [10], - 'modifiers': [ - {'name': 'mu', 'type': 'normfactor', 'data': None} + "name": "signal", + "data": [10], + "modifiers": [ + {"name": "mu", "type": "normfactor", "data": None} ], }, { - 'name': 'background', - 'data': [20], - 'modifiers': [ + "name": "background", + "data": [20], + "modifiers": [ { - 'name': 'uncorr_bkguncrt', - 'type': 'shapesys', - 'data': [30], + "name": "uncorr_bkguncrt", + "type": "shapesys", + "data": [30], } ], }, @@ -643,7 +643,7 @@ def test_defs_always_cached( } ] } - pyhf.schema.validate(spec, 'model.json') # may try to access network and fail + pyhf.schema.validate(spec, "model.json") # may try to access network and fail def test_schema_tensor_type_allowed(backend): diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 7eef61c5cd..27f3fff6a7 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -25,30 +25,30 @@ def tarfile_path(tmp_path): def test_version(script_runner): - command = 'pyhf --version' + command = "pyhf --version" start = time.time() ret = script_runner.run(shlex.split(command)) end = time.time() elapsed = end - start assert ret.success assert pyhf.__version__ in ret.stdout - assert ret.stderr == '' + assert ret.stderr == "" # make sure it took less than a second assert elapsed < 1.0 @pytest.mark.parametrize("flag", ["--cite", "--citation"]) def test_citation(script_runner, flag): - command = f'pyhf {flag}' + command = f"pyhf {flag}" start = time.time() ret = script_runner.run(shlex.split(command)) end = time.time() elapsed = end - start assert 
ret.success - assert ret.stdout.startswith('@software{pyhf,') - assert '@article{pyhf_joss,' in ret.stdout + assert ret.stdout.startswith("@software{pyhf,") + assert "@article{pyhf_joss," in ret.stdout # ensure there's not \n\n at the end - assert ret.stdout.endswith('}\n') + assert ret.stdout.endswith("}\n") # make sure it took less than a second assert elapsed < 1.0 @@ -56,32 +56,32 @@ def test_citation(script_runner, flag): # see test_import.py for the same (detailed) test def test_import_prepHistFactory(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress" ret = script_runner.run(shlex.split(command)) assert ret.success - assert ret.stdout == '' - assert ret.stderr == '' + assert ret.stdout == "" + assert ret.stderr == "" parsed_xml = json.loads(temp.read_text()) - spec = {'channels': parsed_xml['channels']} - pyhf.schema.validate(spec, 'model.json') + spec = {"channels": parsed_xml["channels"]} + pyhf.schema.validate(spec, "model.json") def test_import_prepHistFactory_withProgress(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success - assert ret.stdout == '' - assert ret.stderr != '' + assert ret.stdout == "" + assert ret.stderr != "" def test_import_prepHistFactory_stdout(tmp_path, script_runner): - command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/' + command = "pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/" ret = script_runner.run(shlex.split(command)) assert ret.success - assert ret.stdout != '' - assert ret.stderr != '' + assert ret.stdout != "" + assert ret.stderr != "" d = json.loads(ret.stdout) assert d @@ -127,69 +127,69 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): def test_import_prepHistFactory_and_cls(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp}' + command = f"pyhf cls {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) assert d - assert 'CLs_obs' in d - assert 'CLs_exp' in d + assert "CLs_obs" in d + assert "CLs_exp" in d for measurement in [ - 'GaussExample', - 'GammaExample', - 'LogNormExample', - 'ConstExample', + "GaussExample", + "GammaExample", + "LogNormExample", + "ConstExample", ]: - command = f'pyhf cls {temp} --measurement {measurement:s}' + command = f"pyhf cls {temp} --measurement {measurement:s}" ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) assert d - assert 'CLs_obs' in d - assert 'CLs_exp' in d + assert 
"CLs_obs" in d + assert "CLs_exp" in d - tmp_out = tmp_path.joinpath(f'{measurement:s}_output.json') + tmp_out = tmp_path.joinpath(f"{measurement:s}_output.json") # make sure output file works too - command += f' --output-file {tmp_out}' + command += f" --output-file {tmp_out}" ret = script_runner.run(shlex.split(command)) assert ret.success d = json.load(tmp_out.open()) - assert 'CLs_obs' in d - assert 'CLs_exp' in d + assert "CLs_obs" in d + assert "CLs_exp" in d def test_import_usingMounts(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' + command = f"pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp} {data.joinpath('config/example.xml')}" ret = script_runner.run(shlex.split(command)) assert ret.success - assert ret.stdout == '' - assert ret.stderr == '' + assert ret.stdout == "" + assert ret.stderr == "" parsed_xml = json.loads(temp.read_text()) - spec = {'channels': parsed_xml['channels']} - pyhf.schema.validate(spec, 'model.json') + spec = {"channels": parsed_xml["channels"]} + pyhf.schema.validate(spec, "model.json") def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' + command = f"pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp} {data.joinpath('config/example.xml')}" ret = script_runner.run(shlex.split(command)) assert not ret.success - assert ret.stdout == '' - assert 'is not a valid colon-separated option' in ret.stderr + assert ret.stdout == "" + assert "is not a valid colon-separated option" in ret.stderr @pytest.mark.parametrize("backend", ["numpy", "jax"]) @@ -210,22 +210,22 @@ def test_fit_backend_option(tmp_path, script_runner, backend): @pytest.mark.parametrize("backend", ["numpy", "jax"]) def test_cls_backend_option(tmp_path, script_runner, backend): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls --backend {backend:s} {temp}' + command = f"pyhf cls --backend {backend:s} {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) assert d - assert 'CLs_obs' in d - assert 'CLs_exp' in d + assert "CLs_obs" in d + assert "CLs_exp" in d def test_import_and_export(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) output_dir_path = tmp_path / "output" @@ -237,19 +237,19 @@ def test_import_and_export(tmp_path, 
script_runner):
 def test_patch(tmp_path, script_runner):
-    patch = tmp_path.joinpath('patch.json')
+    patch = tmp_path.joinpath("patch.json")
     patch.write_text(
-        '''
+        """
 [{"op": "replace", "path": "/channels/0/samples/0/data", "value": [5,6]}]
-    '''
+    """
     )
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf cls {temp} --patch {patch}'
+    command = f"pyhf cls {temp} --patch {patch}"

     ret = script_runner.run(shlex.split(command))
     assert ret.success
@@ -260,7 +260,7 @@ def test_patch(tmp_path, script_runner):
     ret = script_runner.run(shlex.split(command))
     assert ret.success

-    command = f'pyhf cls {temp} --patch -'
+    command = f"pyhf cls {temp} --patch -"

     ret = script_runner.run(shlex.split(command), stdin=patch.open())
     assert ret.success
@@ -274,15 +274,15 @@ def test_patch(tmp_path, script_runner):


 def test_patch_fail(tmp_path, script_runner):
-    patch = tmp_path.joinpath('patch.json')
+    patch = tmp_path.joinpath("patch.json")

-    patch.write_text('''not,json''')
+    patch.write_text("""not,json""")

     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf cls {temp} --patch {patch}'
+    command = f"pyhf cls {temp} --patch {patch}"

     ret = script_runner.run(shlex.split(command))
     assert not ret.success
@@ -296,7 +296,7 @@ def test_patch_fail(tmp_path, script_runner):

 def test_bad_measurement_name(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}"
     ret = script_runner.run(shlex.split(command))

     command = f'pyhf cls {temp} --measurement "a-fake-measurement-name"'
@@ -307,24 +307,24 @@ def test_bad_measurement_name(tmp_path, script_runner):

 def test_testpoi(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}"
     ret = script_runner.run(shlex.split(command))

     pois = [1.0, 0.5, 0.001]
     results_exp = []
     results_obs = []
     for test_poi in pois:
-        command = f'pyhf cls {temp} --test-poi {test_poi:f}'
+        command = f"pyhf cls {temp} --test-poi {test_poi:f}"
         ret = script_runner.run(shlex.split(command))
         assert ret.success

         d = json.loads(ret.stdout)
         assert d
-        assert 'CLs_obs' in d
-        assert 'CLs_exp' in d
+        assert "CLs_obs" in d
+        assert "CLs_exp" in d

-        results_exp.append(d['CLs_exp'])
-        results_obs.append(d['CLs_obs'])
+        results_exp.append(d["CLs_exp"])
+        results_obs.append(d["CLs_obs"])

     import itertools

@@ -352,17 +352,17 @@ def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success):
     assert ret.success == success


-@pytest.mark.parametrize('optimizer', ['scipy', 'minuit'])
+@pytest.mark.parametrize("optimizer", ["scipy", "minuit"])
 @pytest.mark.parametrize(
-    'opts,success', [(['maxiter=1000'], True), (['maxiter=1'], False)]
+    "opts,success", [(["maxiter=1000"], True), (["maxiter=1"], False)]
 )
 def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}"
     ret = script_runner.run(shlex.split(command))

     optconf = " ".join(f"--optconf {opt}" for opt in opts)
-    command = f'pyhf cls {temp} --optimizer {optimizer} {optconf}'
+    command = f"pyhf cls {temp} --optimizer {optimizer} {optconf}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success == success

@@ -370,44 +370,44 @@ def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success):

 def test_inspect(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf inspect {temp}'
+    command = f"pyhf inspect {temp}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_inspect_outfile(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     tempout = tmp_path.joinpath("inspect_output.json")
-    command = f'pyhf inspect {temp} --output-file {tempout}'
+    command = f"pyhf inspect {temp} --output-file {tempout}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     summary = json.loads(tempout.read_text())
     assert [
-        'channels',
-        'measurements',
-        'modifiers',
-        'parameters',
-        'samples',
-        'systematics',
+        "channels",
+        "measurements",
+        "modifiers",
+        "parameters",
+        "samples",
+        "systematics",
     ] == sorted(summary)
-    assert len(summary['channels']) == 1
-    assert len(summary['measurements']) == 4
-    assert len(summary['modifiers']) == 6
-    assert len(summary['parameters']) == 6
-    assert len(summary['samples']) == 3
-    assert len(summary['systematics']) == 6
+    assert len(summary["channels"]) == 1
+    assert len(summary["measurements"]) == 4
+    assert len(summary["modifiers"]) == 6
+    assert len(summary["parameters"]) == 6
+    assert len(summary["samples"]) == 3
+    assert len(summary["systematics"]) == 6


 def test_prune(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     command = f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp}"
@@ -417,82 +417,82 @@ def test_prune(tmp_path, script_runner):

 def test_prune_outfile(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     tempout = tmp_path.joinpath("prune_output.json")
-    command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout}'
+    command = f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     spec = json.loads(temp.read_text())
     ws = pyhf.Workspace(spec)
-    assert 'GammaExample' in ws.measurement_names
-    assert 'staterror_channel1' in ws.model().config.parameters
+    assert "GammaExample" in ws.measurement_names
+    assert "staterror_channel1" in ws.model().config.parameters

     pruned_spec = json.loads(tempout.read_text())
     pruned_ws = pyhf.Workspace(pruned_spec)
-    assert 'GammaExample' not in pruned_ws.measurement_names
-    assert 'staterror_channel1' not in pruned_ws.model().config.parameters
+    assert "GammaExample" not in pruned_ws.measurement_names
+    assert "staterror_channel1" not in pruned_ws.model().config.parameters


 def test_rename(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp}'
+    command = f"pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_rename_outfile(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     tempout = tmp_path.joinpath("rename_output.json")
-    command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout}'
+    command = f"pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     spec = json.loads(temp.read_text())
     ws = pyhf.Workspace(spec)
-    assert 'GammaExample' in ws.measurement_names
-    assert 'GamEx' not in ws.measurement_names
-    assert 'staterror_channel1' in ws.model().config.parameters
-    assert 'staterror_channelone' not in ws.model().config.parameters
+    assert "GammaExample" in ws.measurement_names
+    assert "GamEx" not in ws.measurement_names
+    assert "staterror_channel1" in ws.model().config.parameters
+    assert "staterror_channelone" not in ws.model().config.parameters

     renamed_spec = json.loads(tempout.read_text())
     renamed_ws = pyhf.Workspace(renamed_spec)
-    assert 'GammaExample' not in renamed_ws.measurement_names
-    assert 'GamEx' in renamed_ws.measurement_names
-    assert 'staterror_channel1' not in renamed_ws.model().config.parameters
-    assert 'staterror_channelone' in renamed_ws.model().config.parameters
+    assert "GammaExample" not in renamed_ws.measurement_names
+    assert "GamEx" in renamed_ws.measurement_names
+    assert "staterror_channel1" not in renamed_ws.model().config.parameters
+    assert "staterror_channelone" in renamed_ws.model().config.parameters


 def test_combine(tmp_path, script_runner):
     temp_1 = tmp_path.joinpath("parsed_output.json")
     temp_2 = tmp_path.joinpath("renamed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress"
     ret = script_runner.run(shlex.split(command))

-    rename_channels = {'channel1': 'channel2'}
+    rename_channels = {"channel1": "channel2"}
     rename_measurements = {
-        'ConstExample': 'OtherConstExample',
-        'LogNormExample': 'OtherLogNormExample',
-        'GaussExample': 'OtherGaussExample',
-        'GammaExample': 'OtherGammaExample',
+        "ConstExample": "OtherConstExample",
+        "LogNormExample": "OtherLogNormExample",
+        "GaussExample": "OtherGaussExample",
+        "GammaExample": "OtherGammaExample",
     }

-    _opts_channels = ''.join(
-        ' -c ' + ' '.join(item) for item in rename_channels.items()
+    _opts_channels = "".join(
+        " -c " + " ".join(item) for item in rename_channels.items()
     )
-    _opts_measurements = ''.join(
-        ' --measurement ' + ' '.join(item) for item in rename_measurements.items()
+    _opts_measurements = "".join(
+        " --measurement " + " ".join(item) for item in rename_measurements.items()
     )
     command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf combine {temp_1} {temp_2}'
+    command = f"pyhf combine {temp_1} {temp_2}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

@@ -500,45 +500,45 @@ def test_combine(tmp_path, script_runner):
 def test_combine_outfile(tmp_path, script_runner):
     temp_1 = tmp_path.joinpath("parsed_output.json")
     temp_2 = tmp_path.joinpath("renamed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress"
     ret = script_runner.run(shlex.split(command))

-    rename_channels = {'channel1': 'channel2'}
+    rename_channels = {"channel1": "channel2"}
     rename_measurements = {
-        'ConstExample': 'OtherConstExample',
-        'LogNormExample': 'OtherLogNormExample',
-        'GaussExample': 'OtherGaussExample',
-        'GammaExample': 'OtherGammaExample',
+        "ConstExample": "OtherConstExample",
+        "LogNormExample": "OtherLogNormExample",
+        "GaussExample": "OtherGaussExample",
+        "GammaExample": "OtherGammaExample",
    }

-    _opts_channels = ''.join(
-        ' -c ' + ' '.join(item) for item in rename_channels.items()
+    _opts_channels = "".join(
+        " -c " + " ".join(item) for item in rename_channels.items()
     )
-    _opts_measurements = ''.join(
-        ' --measurement ' + ' '.join(item) for item in rename_measurements.items()
+    _opts_measurements = "".join(
+        " --measurement " + " ".join(item) for item in rename_measurements.items()
     )
     command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}"
     ret = script_runner.run(shlex.split(command))

     tempout = tmp_path.joinpath("combined_output.json")
-    command = f'pyhf combine {temp_1} {temp_2} --output-file {tempout}'
+    command = f"pyhf combine {temp_1} {temp_2} --output-file {tempout}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     combined_spec = json.loads(tempout.read_text())
     combined_ws = pyhf.Workspace(combined_spec)
-    assert combined_ws.channels == ['channel1', 'channel2']
+    assert combined_ws.channels == ["channel1", "channel2"]
     assert len(combined_ws.measurement_names) == 8


 def test_combine_merge_channels(tmp_path, script_runner):
     temp_1 = tmp_path.joinpath("parsed_output.json")
     temp_2 = tmp_path.joinpath("renamed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

-    command = f'pyhf prune {temp_1} --sample signal --output-file {temp_2}'
+    command = f"pyhf prune {temp_1} --sample signal --output-file {temp_2}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

@@ -548,18 +548,18 @@ def test_combine_merge_channels(tmp_path, script_runner):
     assert ret.success


-@pytest.mark.parametrize('do_json', [False, True])
+@pytest.mark.parametrize("do_json", [False, True])
 @pytest.mark.parametrize(
-    'algorithms', [['md5'], ['sha256'], ['sha256', 'md5'], ['sha256', 'md5']]
+    "algorithms", [["md5"], ["sha256"], ["sha256", "md5"], ["sha256", "md5"]]
 )
 def test_workspace_digest(tmp_path, script_runner, algorithms, do_json):
     results = {
-        'md5': '7de8930ff37e5a4f6a31da11bda7813f',
-        'sha256': '6d416ee67a40460499ea2ef596fb1e682a563d7df06e690018a211d35238aecc',
+        "md5": "7de8930ff37e5a4f6a31da11bda7813f",
+        "sha256": "6d416ee67a40460499ea2ef596fb1e682a563d7df06e690018a211d35238aecc",
     }

     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     command = (
@@ -575,12 +575,12 @@ def test_workspace_digest(tmp_path, script_runner, algorithms, do_json):
             indent=4,
         )
     else:
-        expected_output = '\n'.join(
+        expected_output = "\n".join(
             f"{algorithm}:{results[algorithm]}" for algorithm in algorithms
         )

-    assert ret.stdout == expected_output + '\n'
-    assert ret.stderr == ''
+    assert ret.stdout == expected_output + "\n"
+    assert ret.stderr == ""

     if do_json:
         assert json.loads(ret.stdout) == {
@@ -599,19 +599,19 @@ def test_patchset_download(
     tmp_path, script_runner, requests_mock, tarfile_path, archive
 ):
     requests_mock.get(archive, content=open(tarfile_path, "rb").read())
-    command = f'pyhf contrib download {archive} {tmp_path.joinpath("likelihoods")}'
+    command = f"pyhf contrib download {archive} {tmp_path.joinpath('likelihoods')}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     # Run with all optional flags
-    command = f'pyhf contrib download --verbose --force {archive} {tmp_path.joinpath("likelihoods")}'
+    command = f"pyhf contrib download --verbose --force {archive} {tmp_path.joinpath('likelihoods')}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

     requests_mock.get(
         "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200
     )
-    command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath("likelihoods")}'
+    command = f"pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath('likelihoods')}"
     ret = script_runner.run(shlex.split(command))
     assert not ret.success
     assert (
@@ -623,7 +623,7 @@ def test_patchset_download(
     requests_mock.get(
         "https://httpstat.us/404/record/resource/1234567", status_code=404
     )
-    command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath("likelihoods")}'
+    command = f"pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath('likelihoods')}"
     ret = script_runner.run(shlex.split(command))
     assert not ret.success
     assert "gives a response code of 404" in ret.stderr
@@ -693,16 +693,16 @@ def test_missing_contrib_download(caplog):


 def test_patchset_inspect(datadir, script_runner):
-    command = f'pyhf patchset inspect {datadir.joinpath("example_patchset.json")}'
+    command = f"pyhf patchset inspect {datadir.joinpath('example_patchset.json')}"
     ret = script_runner.run(shlex.split(command))
-    assert 'patch_channel1_signal_syst1' in ret.stdout
+    assert "patch_channel1_signal_syst1" in ret.stdout


-@pytest.mark.parametrize('output_file', [False, True])
-@pytest.mark.parametrize('with_metadata', [False, True])
+@pytest.mark.parametrize("output_file", [False, True])
+@pytest.mark.parametrize("with_metadata", [False, True])
 def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_metadata):
     temp = tmp_path.joinpath("extracted_output.json")
-    command = f'pyhf patchset extract {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1'
+    command = f"pyhf patchset extract {datadir.joinpath('example_patchset.json')} --name patch_channel1_signal_syst1"
     if output_file:
         command += f" --output-file {temp}"
     if with_metadata:
@@ -716,7 +716,7 @@ def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_me
     else:
         extracted_output = json.loads(ret.stdout)
     if with_metadata:
-        assert 'metadata' in extracted_output
+        assert "metadata" in extracted_output
     else:
         assert (
             extracted_output
@@ -727,17 +727,17 @@ def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_me

 def test_patchset_verify(datadir, script_runner):
-    command = f'pyhf patchset verify {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")}'
+    command = f"pyhf patchset verify {datadir.joinpath('example_bkgonly.json')} {datadir.joinpath('example_patchset.json')}"
     ret = script_runner.run(shlex.split(command))

     assert ret.success
-    assert 'All good' in ret.stdout
+    assert "All good" in ret.stdout


-@pytest.mark.parametrize('output_file', [False, True])
+@pytest.mark.parametrize("output_file", [False, True])
 def test_patchset_apply(datadir, tmp_path, script_runner, output_file):
     temp = tmp_path.joinpath("patched_output.json")
-    command = f'pyhf patchset apply {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1'
+    command = f"pyhf patchset apply {datadir.joinpath('example_bkgonly.json')} {datadir.joinpath('example_patchset.json')} --name patch_channel1_signal_syst1"
     if output_file:
         command += f" --output-file {temp}"

@@ -748,7 +748,7 @@ def test_patchset_apply(datadir, tmp_path, script_runner, output_file):
         extracted_output = json.loads(temp.read_text())
     else:
         extracted_output = json.loads(ret.stdout)
-    assert extracted_output['channels'][0]['samples'][0]['modifiers'][0]['data'] == {
+    assert extracted_output["channels"][0]["samples"][0]["modifiers"][0]["data"] == {
         "hi": 1.2,
         "lo": 0.8,
     }


 def test_sort(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

-    command = f'pyhf sort {temp}'
+    command = f"pyhf sort {temp}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success

@@ -767,11 +767,11 @@ def test_sort(tmp_path, script_runner):

 def test_sort_outfile(tmp_path, script_runner):
     temp = tmp_path.joinpath("parsed_output.json")
-    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress'
+    command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress"
     ret = script_runner.run(shlex.split(command))

     tempout = tmp_path.joinpath("sort_output.json")
-    command = f'pyhf sort {temp} --output-file {tempout}'
+    command = f"pyhf sort {temp} --output-file {tempout}"
     ret = script_runner.run(shlex.split(command))
     assert ret.success
diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py
index 2738e3fb95..358f643df6 100644
--- a/tests/test_simplemodels.py
+++ b/tests/test_simplemodels.py
@@ -3,7 +3,7 @@
 import pyhf


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def default_backend(backend):
     pyhf.set_backend(*backend, default=True)
     yield backend
@@ -31,9 +31,9 @@ def test_uncorrelated_background(backend):
     assert model.config.samples == ["background", "signal"]
     assert model.config.par_order == ["mu", "uncorr_bkguncrt"]
     assert model.config.par_names == [
-        'mu',
-        'uncorr_bkguncrt[0]',
-        'uncorr_bkguncrt[1]',
+        "mu",
+        "uncorr_bkguncrt[0]",
+        "uncorr_bkguncrt[1]",
     ]

     assert model.config.suggested_init() == [1.0, 1.0, 1.0]
@@ -64,8 +64,8 @@ def test_uncorrelated_background_default_backend(default_backend):
     assert model.config.samples == ["background", "signal"]
     assert model.config.par_order == ["mu", "uncorr_bkguncrt"]
     assert model.config.par_names == [
-        'mu',
-        'uncorr_bkguncrt[0]',
-        'uncorr_bkguncrt[1]',
+        "mu",
+        "uncorr_bkguncrt[0]",
+        "uncorr_bkguncrt[1]",
     ]

     assert model.config.suggested_init() == [1.0, 1.0, 1.0]
diff --git a/tests/test_tensor.py b/tests/test_tensor.py
index 448cedadc2..dc6029d618 100644
--- a/tests/test_tensor.py
+++ b/tests/test_tensor.py
@@ -9,10 +9,10 @@

 def test_astensor_dtype(backend, caplog):
     tb = pyhf.tensorlib
-    with caplog.at_level(logging.INFO, 'pyhf.tensor'):
+    with caplog.at_level(logging.INFO, "pyhf.tensor"):
         with pytest.raises(KeyError):
-            assert tb.astensor([1, 2, 3], dtype='long')
-        assert 'Invalid dtype' in caplog.text
+            assert tb.astensor([1, 2, 3], dtype="long")
+        assert "Invalid dtype" in caplog.text


 def test_ones_dtype(backend, caplog):
@@ -211,15 +211,15 @@ def test_reshape(backend):

 def test_swap(backend):
     tb = pyhf.tensorlib
-    assert tb.tolist(tb.einsum('ij...->ji...', tb.astensor([[1, 2, 3]]))) == [
+    assert tb.tolist(tb.einsum("ij...->ji...", tb.astensor([[1, 2, 3]]))) == [
         [1],
         [2],
         [3],
     ]
-    assert tb.tolist(tb.einsum('ij...->ji...', tb.astensor([[[1, 2, 3]]]))) == [
+    assert tb.tolist(tb.einsum("ij...->ji...", tb.astensor([[[1, 2, 3]]]))) == [
         [[1, 2, 3]]
     ]
-    assert tb.tolist(tb.einsum('ijk...->kji...', tb.astensor([[[1, 2, 3]]]))) == [
+    assert tb.tolist(tb.einsum("ijk...->kji...", tb.astensor([[[1, 2, 3]]]))) == [
         [[1]],
         [[2]],
         [[3]],
@@ -298,13 +298,13 @@ def test_boolean_mask(backend):
     assert tb.tolist(
         tb.boolean_mask(
             tb.astensor([1, 2, 3, 4, 5, 6]),
-            tb.astensor([True, True, False, True, False, False], dtype='bool'),
+            tb.astensor([True, True, False, True, False, False], dtype="bool"),
         )
     ) == [1, 2, 4]
     assert tb.tolist(
         tb.boolean_mask(
             tb.astensor([[1, 2], [3, 4], [5, 6]]),
-            tb.astensor([[True, True], [False, True], [False, False]], dtype='bool'),
+            tb.astensor([[True, True], [False, True], [False, False]], dtype="bool"),
         )
     ) == [1, 2, 4]
@@ -350,12 +350,12 @@ def test_1D_gather(backend):
     tb = pyhf.tensorlib
     assert tb.tolist(
         tb.gather(
-            tb.astensor([1, 2, 3, 4, 5, 6]), tb.astensor([4, 0, 3, 2], dtype='int')
+            tb.astensor([1, 2, 3, 4, 5, 6]), tb.astensor([4, 0, 3, 2], dtype="int")
         )
     ) == [5, 1, 4, 3]
     assert tb.tolist(
         tb.gather(
-            tb.astensor([1, 2, 3, 4, 5, 6]), tb.astensor([[4, 0], [3, 2]], dtype='int')
+            tb.astensor([1, 2, 3, 4, 5, 6]), tb.astensor([[4, 0], [3, 2]], dtype="int")
         )
     ) == [[5, 1], [4, 3]]
@@ -364,7 +364,7 @@ def test_ND_gather(backend):
     tb = pyhf.tensorlib
     assert tb.tolist(
         tb.gather(
-            tb.astensor([[1, 2], [3, 4], [5, 6]]), tb.astensor([1, 0], dtype='int')
+            tb.astensor([[1, 2], [3, 4], [5, 6]]), tb.astensor([1, 0], dtype="int")
         )
     ) == [[3, 4], [1, 2]]
@@ -383,10 +383,10 @@ def test_einsum(backend):
     x = np.arange(20).reshape(5, 4).tolist()

     assert np.all(
-        tb.tolist(tb.einsum('ij->ji', tb.astensor(x))) == np.asarray(x).T.tolist()
+        tb.tolist(tb.einsum("ij->ji", tb.astensor(x))) == np.asarray(x).T.tolist()
     )
     assert (
-        tb.tolist(tb.einsum('i,j->ij', tb.astensor([1, 1, 1]), tb.astensor([1, 2, 3])))
+        tb.tolist(tb.einsum("i,j->ij", tb.astensor([1, 1, 1]), tb.astensor([1, 2, 3])))
         == [[1, 2, 3]] * 3
     )
@@ -417,27 +417,27 @@ def test_pdf_eval(backend):
         },
     }
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'singlechannel',
-                'samples': [
+                "name": "singlechannel",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'bkg_norm',
-                                'type': 'histosys',
-                                'data': {
-                                    'lo_data': source['bindata']['bkgsys_dn'],
-                                    'hi_data': source['bindata']['bkgsys_up'],
+                                "name": "bkg_norm",
+                                "type": "histosys",
+                                "data": {
+                                    "lo_data": source["bindata"]["bkgsys_dn"],
+                                    "hi_data": source["bindata"]["bkgsys_up"],
                                 },
                             }
                         ],
@@ -447,7 +447,7 @@ def test_pdf_eval(backend):
         ]
     }
     pdf = pyhf.Model(spec)
-    data = source['bindata']['data'] + pdf.config.auxdata
+    data = source["bindata"]["data"] + pdf.config.auxdata

     assert pytest.approx([-17.648827643136507], rel=5e-5) == pyhf.tensorlib.tolist(
         pdf.logpdf(pdf.config.suggested_init(), data)
     )
@@ -465,9 +465,9 @@ def test_pdf_eval_2(backend):
     }

     pdf = uncorrelated_background(
-        source['bindata']['sig'], source['bindata']['bkg'], source['bindata']['bkgerr']
+        source["bindata"]["sig"], source["bindata"]["bkg"], source["bindata"]["bkgerr"]
     )
-    data = source['bindata']['data'] + pdf.config.auxdata
+    data = source["bindata"]["data"] + pdf.config.auxdata

     assert pytest.approx([-23.579605171119738], rel=5e-5) == pyhf.tensorlib.tolist(
         pdf.logpdf(pdf.config.suggested_init(), data)
@@ -476,31 +476,31 @@ def test_pdf_eval_2(backend):

 def test_tensor_precision(backend):
     tb, _ = backend
-    assert tb.precision in ['32b', '64b']
+    assert tb.precision in ["32b", "64b"]


 @pytest.mark.parametrize(
-    'tensorlib',
-    ['numpy_backend', 'jax_backend'],
+    "tensorlib",
+    ["numpy_backend", "jax_backend"],
 )
-@pytest.mark.parametrize('precision', ['64b', '32b'])
+@pytest.mark.parametrize("precision", ["64b", "32b"])
 def test_set_tensor_precision(tensorlib, precision):
     tb = getattr(pyhf.tensor, tensorlib)(precision=precision)
     assert tb.precision == precision
     # check for float64/int64/float32/int32 in the dtypemap by looking at the class names
     # - may break if class names stop including this, but i doubt it
-    assert f'float{precision[:1]}' in str(tb.dtypemap['float'])
-    assert f'int{precision[:1]}' in str(tb.dtypemap['int'])
+    assert f"float{precision[:1]}" in str(tb.dtypemap["float"])
+    assert f"int{precision[:1]}" in str(tb.dtypemap["int"])


 def test_trigger_tensorlib_changed_name(mocker):
-    numpy_64 = pyhf.tensor.numpy_backend(precision='64b')
-    jax_64 = pyhf.tensor.jax_backend(precision='64b')
+    numpy_64 = pyhf.tensor.numpy_backend(precision="64b")
+    jax_64 = pyhf.tensor.jax_backend(precision="64b")

     pyhf.set_backend(numpy_64)

     func = mocker.Mock()
-    pyhf.events.subscribe('tensorlib_changed')(func.__call__)
+    pyhf.events.subscribe("tensorlib_changed")(func.__call__)

     assert func.call_count == 0
     pyhf.set_backend(jax_64)
@@ -508,13 +508,13 @@


 def test_trigger_tensorlib_changed_precision(mocker):
-    jax_64 = pyhf.tensor.jax_backend(precision='64b')
-    jax_32 = pyhf.tensor.jax_backend(precision='32b')
+    jax_64 = pyhf.tensor.jax_backend(precision="64b")
+    jax_32 = pyhf.tensor.jax_backend(precision="32b")

     pyhf.set_backend(jax_64)

     func = mocker.Mock()
-    pyhf.events.subscribe('tensorlib_changed')(func.__call__)
+    pyhf.events.subscribe("tensorlib_changed")(func.__call__)

     assert func.call_count == 0
     pyhf.set_backend(jax_32)
@@ -522,10 +522,10 @@


 @pytest.mark.parametrize(
-    'tensorlib',
-    ['numpy_backend', 'jax_backend'],
+    "tensorlib",
+    ["numpy_backend", "jax_backend"],
 )
-@pytest.mark.parametrize('precision', ['64b', '32b'])
+@pytest.mark.parametrize("precision", ["64b", "32b"])
 def test_tensorlib_setup(tensorlib, precision, mocker):
     tb = getattr(pyhf.tensor, tensorlib)(precision=precision)
diff --git a/tests/test_tensorviewer.py b/tests/test_tensorviewer.py
index 1acc80af71..30f62dd2bf 100644
--- a/tests/test_tensorviewer.py
+++ b/tests/test_tensorviewer.py
@@ -5,21 +5,21 @@
 def test_tensorviewer(backend):
     tb, _ = backend
     tv = _TensorViewer(
         [tb.astensor([0, 4, 5]), tb.astensor([1, 2, 3]), tb.astensor([6])],
-        names=['zzz', 'aaa', 'x'],
+        names=["zzz", "aaa", "x"],
     )

-    data = tb.astensor(tb.astensor(list(range(7))) * 10, dtype='int')
+    data = tb.astensor(tb.astensor(list(range(7))) * 10, dtype="int")

-    a = [tb.tolist(x) for x in tv.split(data, selection=['aaa'])]
+    a = [tb.tolist(x) for x in tv.split(data, selection=["aaa"])]
     assert a == [[10, 20, 30]]

-    a = [tb.tolist(x) for x in tv.split(data, selection=['aaa', 'zzz'])]
+    a = [tb.tolist(x) for x in tv.split(data, selection=["aaa", "zzz"])]
     assert a == [[10, 20, 30], [0, 40, 50]]

-    a = [tb.tolist(x) for x in tv.split(data, selection=['zzz', 'aaa'])]
+    a = [tb.tolist(x) for x in tv.split(data, selection=["zzz", "aaa"])]
     assert a == [[0, 40, 50], [10, 20, 30]]

-    a = [tb.tolist(x) for x in tv.split(data, selection=['x', 'aaa'])]
+    a = [tb.tolist(x) for x in tv.split(data, selection=["x", "aaa"])]
     assert a == [[60], [10, 20, 30]]

     a = [tb.tolist(x) for x in tv.split(data, selection=[])]
@@ -29,11 +29,11 @@ def test_tensorviewer(backend):
     assert a == [[0, 40, 50], [10, 20, 30], [60]]

     subviewer = _TensorViewer(
-        [tb.astensor([0]), tb.astensor([1, 2, 3])], names=['x', 'aaa']
+        [tb.astensor([0]), tb.astensor([1, 2, 3])], names=["x", "aaa"]
     )
-    assert tb.tolist(subviewer.stitch(tv.split(data, ['x', 'aaa']))) == [60, 10, 20, 30]
+    assert tb.tolist(subviewer.stitch(tv.split(data, ["x", "aaa"]))) == [60, 10, 20, 30]

     subviewer = _TensorViewer(
-        [tb.astensor([0, 1, 2]), tb.astensor([3])], names=['aaa', 'x']
+        [tb.astensor([0, 1, 2]), tb.astensor([3])], names=["aaa", "x"]
     )
-    assert tb.tolist(subviewer.stitch(tv.split(data, ['aaa', 'x']))) == [10, 20, 30, 60]
+    assert tb.tolist(subviewer.stitch(tv.split(data, ["aaa", "x"]))) == [10, 20, 30, 60]
diff --git a/tests/test_utils.py b/tests/test_utils.py
index a7800a742d..0b62d070cc 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -4,16 +4,16 @@


 @pytest.mark.parametrize(
-    'opts,obj',
+    "opts,obj",
     [
-        (['a=10'], {'a': 10}),
-        (['b=test'], {'b': 'test'}),
-        (['c=1.0e-8'], {'c': 1.0e-8}),
-        (['d=3.14'], {'d': 3.14}),
-        (['e=True'], {'e': True}),
-        (['f=false'], {'f': False}),
-        (['a=b', 'c=d'], {'a': 'b', 'c': 'd'}),
-        (['g=h=i'], {'g': 'h=i'}),
+        (["a=10"], {"a": 10}),
+        (["b=test"], {"b": "test"}),
+        (["c=1.0e-8"], {"c": 1.0e-8}),
+        (["d=3.14"], {"d": 3.14}),
+        (["e=True"], {"e": True}),
+        (["f=false"], {"f": False}),
+        (["a=b", "c=d"], {"a": "b", "c": "d"}),
+        (["g=h=i"], {"g": "h=i"}),
     ],
 )
 def test_options_from_eqdelimstring(opts, obj):
@@ -21,18 +21,18 @@


 @pytest.mark.parametrize(
-    'obj',
+    "obj",
     [
-        {'a': 2.0, 'b': 1.0, 'c': 'a'},
-        {'b': 1.0, 'c': 'a', 'a': 2.0},
-        {'c': 'a', 'a': 2.0, 'b': 1.0},
+        {"a": 2.0, "b": 1.0, "c": "a"},
+        {"b": 1.0, "c": "a", "a": 2.0},
+        {"c": "a", "a": 2.0, "b": 1.0},
     ],
 )
-@pytest.mark.parametrize('algorithm', ['md5', 'sha256'])
+@pytest.mark.parametrize("algorithm", ["md5", "sha256"])
 def test_digest(obj, algorithm):
     results = {
-        'md5': '155e52b05179a1106d71e5e053452517',
-        'sha256': '03dfbceade79855fc9b4e4d6fbd4f437109de68330dab37c3091a15f4bffe593',
+        "md5": "155e52b05179a1106d71e5e053452517",
+        "sha256": "03dfbceade79855fc9b4e4d6fbd4f437109de68330dab37c3091a15f4bffe593",
     }
     assert pyhf.utils.digest(obj, algorithm=algorithm) == results[algorithm]
@@ -44,12 +44,12 @@ def test_digest_bad_obj():

 def test_digest_bad_alg():
     with pytest.raises(ValueError, match="nonexistent_algorithm"):
-        pyhf.utils.digest({}, algorithm='nonexistent_algorithm')
+        pyhf.utils.digest({}, algorithm="nonexistent_algorithm")


-@pytest.mark.parametrize('oneline', [False, True])
+@pytest.mark.parametrize("oneline", [False, True])
 def test_citation(oneline):
     citation = pyhf.utils.citation(oneline)
     assert citation

     if oneline:
-        assert '\n' not in citation
+        assert "\n" not in citation
diff --git a/tests/test_validation.py b/tests/test_validation.py
index 0c5b1bcc18..a36e445b42 100644
--- a/tests/test_validation.py
+++ b/tests/test_validation.py
@@ -8,35 +8,35 @@
 import pyhf.writexml


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_1bin_shapesys():
     with open("validation/data/1bin_example1.json", encoding="utf-8") as read_json:
         return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_1bin_shapesys(source_1bin_shapesys):
     source = source_1bin_shapesys
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'singlechannel',
-                'samples': [
+                "name": "singlechannel",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'uncorr_bkguncrt',
-                                'type': 'shapesys',
-                                'data': source['bindata']['bkgerr'],
+                                "name": "uncorr_bkguncrt",
+                                "type": "shapesys",
+                                "data": source["bindata"]["bkgerr"],
                             }
                         ],
                     },
@@ -47,7 +47,7 @@ def spec_1bin_shapesys(source_1bin_shapesys):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_1bin_shapesys():
     expected_result = {
         "exp": [
@@ -62,7 +62,7 @@ def expected_result_1bin_shapesys():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_1bin_shapesys_q0():
     with open("validation/data/1bin_example1_q0.json", encoding="utf-8") as read_json:
         return json.load(read_json)
@@ -71,29 +71,29 @@ def source_1bin_shapesys_q0():
 source_1bin_shapesys_q0_toys = source_1bin_shapesys_q0


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_1bin_shapesys_q0(source_1bin_shapesys_q0):
     source = source_1bin_shapesys_q0
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'singlechannel',
-                'samples': [
+                "name": "singlechannel",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'uncorr_bkguncrt',
-                                'type': 'shapesys',
-                                'data': source['bindata']['bkgerr'],
+                                "name": "uncorr_bkguncrt",
+                                "type": "shapesys",
+                                "data": source["bindata"]["bkgerr"],
                             }
                         ],
                     },
@@ -107,7 +107,7 @@ def spec_1bin_shapesys_q0(source_1bin_shapesys_q0):
 spec_1bin_shapesys_q0_toys = spec_1bin_shapesys_q0


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_1bin_shapesys_q0():
     expected_result = {
         "exp": [
@@ -122,7 +122,7 @@ def expected_result_1bin_shapesys_q0():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_1bin_shapesys_q0_toys():
     expected_result = {
         "exp": [0.0, 0.0, 0.0135, 0.1365, 0.39854497],
@@ -131,13 +131,13 @@ def expected_result_1bin_shapesys_q0_toys():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_1bin_lumi():
     with open("validation/data/1bin_lumi.json", encoding="utf-8") as read_json:
         return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_1bin_lumi(source_1bin_lumi):
     source = source_1bin_lumi
     spec = {
@@ -147,19 +147,19 @@ def spec_1bin_lumi(source_1bin_lumi):
             "samples": [
                 {
                     "name": "signal",
-                    "data": source['bindata']['sig'],
+                    "data": source["bindata"]["sig"],
                     "modifiers": [
                         {"data": None, "name": "mu", "type": "normfactor"}
                     ],
                 },
                 {
                     "name": "background1",
-                    "data": source['bindata']['bkg1'],
+                    "data": source["bindata"]["bkg1"],
                     "modifiers": [{"data": None, "name": "lumi", "type": "lumi"}],
                 },
                 {
                     "name": "background2",
-                    "data": source['bindata']['bkg2'],
+                    "data": source["bindata"]["bkg2"],
                     "modifiers": [{"data": None, "name": "lumi", "type": "lumi"}],
                 },
             ],
@@ -178,7 +178,7 @@ def spec_1bin_lumi(source_1bin_lumi):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_1bin_lumi():
     expected_result = {
         "exp": [
@@ -193,35 +193,35 @@ def expected_result_1bin_lumi():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_1bin_normsys():
     with open("validation/data/1bin_normsys.json", encoding="utf-8") as read_json:
         return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_1bin_normsys(source_1bin_normsys):
     source = source_1bin_normsys
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'singlechannel',
-                'samples': [
+                "name": "singlechannel",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'bkg_norm',
-                                'type': 'normsys',
-                                'data': {'lo': 0.90, 'hi': 1.10},
+                                "name": "bkg_norm",
+                                "type": "normsys",
+                                "data": {"lo": 0.90, "hi": 1.10},
                             }
                         ],
                     },
@@ -232,7 +232,7 @@ def spec_1bin_normsys(source_1bin_normsys):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_1bin_normsys():
     expected_result = {
         "exp": [
@@ -247,7 +247,7 @@ def expected_result_1bin_normsys():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_2bin_histosys():
     with open(
         "validation/data/2bin_histosys_example2.json", encoding="utf-8"
@@ -255,31 +255,31 @@ def source_2bin_histosys():
     return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_2bin_histosys(source_2bin_histosys):
     source = source_2bin_histosys
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'singlechannel',
-                'samples': [
+                "name": "singlechannel",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'bkg_norm',
-                                'type': 'histosys',
-                                'data': {
-                                    'lo_data': source['bindata']['bkgsys_dn'],
-                                    'hi_data': source['bindata']['bkgsys_up'],
+                                "name": "bkg_norm",
+                                "type": "histosys",
+                                "data": {
+                                    "lo_data": source["bindata"]["bkgsys_dn"],
+                                    "hi_data": source["bindata"]["bkgsys_up"],
                                 },
                             }
                         ],
@@ -291,7 +291,7 @@ def spec_2bin_histosys(source_2bin_histosys):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_2bin_histosys():
     expected_result = {
         "exp": [
@@ -306,7 +306,7 @@ def expected_result_2bin_histosys():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_2bin_2channel():
     with open(
         "validation/data/2bin_2channel_example1.json", encoding="utf-8"
@@ -314,30 +314,30 @@ def source_2bin_2channel():
     return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_2bin_2channel(source_2bin_2channel):
     source = source_2bin_2channel
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'signal',
-                'samples': [
+                "name": "signal",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['channels']['signal']['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["channels"]["signal"]["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'background',
-                        'data': source['channels']['signal']['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["channels"]["signal"]["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'uncorr_bkguncrt_signal',
-                                'type': 'shapesys',
-                                'data': source['channels']['signal']['bindata'][
-                                    'bkgerr'
+                                "name": "uncorr_bkguncrt_signal",
+                                "type": "shapesys",
+                                "data": source["channels"]["signal"]["bindata"][
+                                    "bkgerr"
                                 ],
                             }
                         ],
@@ -345,17 +345,17 @@ def spec_2bin_2channel(source_2bin_2channel):
                 ],
             },
             {
-                'name': 'control',
-                'samples': [
+                "name": "control",
+                "samples": [
                     {
-                        'name': 'background',
-                        'data': source['channels']['control']['bindata']['bkg'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["channels"]["control"]["bindata"]["bkg"],
+                        "modifiers": [
                             {
-                                'name': 'uncorr_bkguncrt_control',
-                                'type': 'shapesys',
-                                'data': source['channels']['control']['bindata'][
-                                    'bkgerr'
+                                "name": "uncorr_bkguncrt_control",
+                                "type": "shapesys",
+                                "data": source["channels"]["control"]["bindata"][
+                                    "bkgerr"
                                 ],
                             }
                         ],
@@ -367,7 +367,7 @@ def spec_2bin_2channel(source_2bin_2channel):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_2bin_2channel():
     expected_result = {
         "exp": [
@@ -382,7 +382,7 @@ def expected_result_2bin_2channel():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_2bin_2channel_couplednorm():
     with open(
         "validation/data/2bin_2channel_couplednorm.json", encoding="utf-8"
@@ -390,56 +390,56 @@ def source_2bin_2channel_couplednorm():
     return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_2bin_2channel_couplednorm(source_2bin_2channel_couplednorm):
     source = source_2bin_2channel_couplednorm
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'signal',
-                'samples': [
+                "name": "signal",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['channels']['signal']['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["channels"]["signal"]["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'bkg1',
-                        'data': source['channels']['signal']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "bkg1",
+                        "data": source["channels"]["signal"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_normsys',
-                                'type': 'normsys',
-                                'data': {'lo': 0.9, 'hi': 1.1},
+                                "name": "coupled_normsys",
+                                "type": "normsys",
+                                "data": {"lo": 0.9, "hi": 1.1},
                             }
                         ],
                     },
                     {
-                        'name': 'bkg2',
-                        'data': source['channels']['signal']['bindata']['bkg2'],
-                        'modifiers': [
+                        "name": "bkg2",
+                        "data": source["channels"]["signal"]["bindata"]["bkg2"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_normsys',
-                                'type': 'normsys',
-                                'data': {'lo': 0.5, 'hi': 1.5},
+                                "name": "coupled_normsys",
+                                "type": "normsys",
+                                "data": {"lo": 0.5, "hi": 1.5},
                             }
                         ],
                     },
                 ],
             },
             {
-                'name': 'control',
-                'samples': [
+                "name": "control",
+                "samples": [
                     {
-                        'name': 'background',
-                        'data': source['channels']['control']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["channels"]["control"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_normsys',
-                                'type': 'normsys',
-                                'data': {'lo': 0.9, 'hi': 1.1},
+                                "name": "coupled_normsys",
+                                "type": "normsys",
+                                "data": {"lo": 0.9, "hi": 1.1},
                             }
                         ],
                     }
@@ -450,7 +450,7 @@ def spec_2bin_2channel_couplednorm(source_2bin_2channel_couplednorm):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_2bin_2channel_couplednorm():
     # NB: mac/linux differ for exp[0]
     # Mac:   0.055222676184648795
@@ -469,7 +469,7 @@ def expected_result_2bin_2channel_couplednorm():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_2bin_2channel_coupledhistosys():
     with open(
         "validation/data/2bin_2channel_coupledhisto.json", encoding="utf-8"
@@ -477,52 +477,52 @@ def source_2bin_2channel_coupledhistosys():
     return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_2bin_2channel_coupledhistosys(source_2bin_2channel_coupledhistosys):
     source = source_2bin_2channel_coupledhistosys
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'signal',
-                'samples': [
+                "name": "signal",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['channels']['signal']['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["channels"]["signal"]["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'bkg1',
-                        'data': source['channels']['signal']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "bkg1",
+                        "data": source["channels"]["signal"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_histosys',
-                                'type': 'histosys',
-                                'data': {
-                                    'lo_data': source['channels']['signal']['bindata'][
-                                        'bkg1_dn'
+                                "name": "coupled_histosys",
+                                "type": "histosys",
+                                "data": {
+                                    "lo_data": source["channels"]["signal"]["bindata"][
+                                        "bkg1_dn"
                                     ],
-                                    'hi_data': source['channels']['signal']['bindata'][
-                                        'bkg1_up'
+                                    "hi_data": source["channels"]["signal"]["bindata"][
+                                        "bkg1_up"
                                     ],
                                 },
                             }
                         ],
                     },
                     {
-                        'name': 'bkg2',
-                        'data': source['channels']['signal']['bindata']['bkg2'],
-                        'modifiers': [
+                        "name": "bkg2",
+                        "data": source["channels"]["signal"]["bindata"]["bkg2"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_histosys',
-                                'type': 'histosys',
-                                'data': {
-                                    'lo_data': source['channels']['signal']['bindata'][
-                                        'bkg2_dn'
+                                "name": "coupled_histosys",
+                                "type": "histosys",
+                                "data": {
+                                    "lo_data": source["channels"]["signal"]["bindata"][
+                                        "bkg2_dn"
                                     ],
-                                    'hi_data': source['channels']['signal']['bindata'][
-                                        'bkg2_up'
+                                    "hi_data": source["channels"]["signal"]["bindata"][
+                                        "bkg2_up"
                                     ],
                                 },
                             }
@@ -531,21 +531,21 @@ def spec_2bin_2channel_coupledhistosys(source_2bin_2channel_coupledhistosys):
                 ],
             },
             {
-                'name': 'control',
-                'samples': [
+                "name": "control",
+                "samples": [
                     {
-                        'name': 'background',
-                        'data': source['channels']['control']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["channels"]["control"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_histosys',
-                                'type': 'histosys',
-                                'data': {
-                                    'lo_data': source['channels']['control']['bindata'][
-                                        'bkg1_dn'
+                                "name": "coupled_histosys",
+                                "type": "histosys",
+                                "data": {
+                                    "lo_data": source["channels"]["control"]["bindata"][
+                                        "bkg1_dn"
                                     ],
-                                    'hi_data': source['channels']['control']['bindata'][
-                                        'bkg1_up'
+                                    "hi_data": source["channels"]["control"]["bindata"][
+                                        "bkg1_up"
                                     ],
                                 },
                             }
@@ -558,7 +558,7 @@ def spec_2bin_2channel_coupledhistosys(source_2bin_2channel_coupledhistosys):
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_2bin_2channel_coupledhistosys():
     expected_result = {
         "exp": [
@@ -573,7 +573,7 @@ def expected_result_2bin_2channel_coupledhistosys():
     return expected_result


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def source_2bin_2channel_coupledshapefactor():
     with open(
         "validation/data/2bin_2channel_coupledshapefactor.json", encoding="utf-8"
@@ -581,45 +581,45 @@ def source_2bin_2channel_coupledshapefactor():
     return json.load(read_json)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def spec_2bin_2channel_coupledshapefactor(source_2bin_2channel_coupledshapefactor):
     source = source_2bin_2channel_coupledshapefactor
     spec = {
-        'channels': [
+        "channels": [
             {
-                'name': 'signal',
-                'samples': [
+                "name": "signal",
+                "samples": [
                     {
-                        'name': 'signal',
-                        'data': source['channels']['signal']['bindata']['sig'],
-                        'modifiers': [
-                            {'name': 'mu', 'type': 'normfactor', 'data': None}
+                        "name": "signal",
+                        "data": source["channels"]["signal"]["bindata"]["sig"],
+                        "modifiers": [
+                            {"name": "mu", "type": "normfactor", "data": None}
                         ],
                     },
                     {
-                        'name': 'bkg1',
-                        'data': source['channels']['signal']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "bkg1",
+                        "data": source["channels"]["signal"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_shapefactor',
-                                'type': 'shapefactor',
-                                'data': None,
+                                "name": "coupled_shapefactor",
+                                "type": "shapefactor",
+                                "data": None,
                             }
                         ],
                     },
                 ],
             },
             {
-                'name': 'control',
-                'samples': [
+                "name": "control",
+                "samples": [
                     {
-                        'name': 'background',
-                        'data': source['channels']['control']['bindata']['bkg1'],
-                        'modifiers': [
+                        "name": "background",
+                        "data": source["channels"]["control"]["bindata"]["bkg1"],
+                        "modifiers": [
                             {
-                                'name': 'coupled_shapefactor',
-                                'type': 'shapefactor',
-                                'data': None,
+                                "name": "coupled_shapefactor",
+                                "type": "shapefactor",
+                                "data": None,
                             }
                         ],
                     }
@@ -630,11 +630,11 @@ def spec_2bin_2channel_coupledshapefactor(source_2bin_2channel_coupledshapefacto
     return spec


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def expected_result_2bin_2channel_coupledshapefactor():
     expected_result = {
-        'obs': 0.5421679124909312,
-        'exp': [
+        "obs": 0.5421679124909312,
+        "exp": [
             0.013753299929451691,
             0.048887400056355966,
             0.15555296253957684,
@@ -657,7 +657,7 @@ def validate_hypotest(
     init_pars = pdf.config.suggested_init()
     par_bounds = pdf.config.suggested_bounds()

-    kwargs = {'return_expected_set': True, 'test_stat': test_stat, 'calctype': calctype}
+    kwargs = {"return_expected_set": True, "test_stat": test_stat, "calctype": calctype}

     np.random.seed(0)
     CLs_obs, CLs_exp_set = pyhf.infer.hypotest(
@@ -669,81 +669,81 @@ def validate_hypotest(
         **kwargs,
     )

-    assert abs(CLs_obs - expected_result['obs']) / expected_result['obs'] < tolerance
-    for result, expected in zip(CLs_exp_set, expected_result['exp']):
+    assert abs(CLs_obs - expected_result["obs"]) / expected_result["obs"] < tolerance
+    for result, expected in zip(CLs_exp_set, expected_result["exp"]):
         assert result == pytest.approx(expected, rel=tolerance), result


 @pytest.fixture(
     params=[
         (
-            '1bin_shapesys',
-            {'init_pars': 2, 'par_bounds': 2},
+            "1bin_shapesys",
+            {"init_pars": 2, "par_bounds": 2},
             1.0,
             "q",
             1e-6,
             "asymptotics",
         ),
         (
-            '1bin_shapesys_q0',
-            {'init_pars': 2, 'par_bounds': 2},
+            "1bin_shapesys_q0",
+            {"init_pars": 2, "par_bounds": 2},
             0.0,
             "q0",
             3e-4,
             "asymptotics",
         ),
         (
-            '1bin_shapesys_q0_toys',
-            {'init_pars': 2, 'par_bounds': 2},
+            "1bin_shapesys_q0_toys",
+            {"init_pars": 2, "par_bounds": 2},
             0.0,
             "q0",
             1e-6,
             "toybased",
         ),
-        ('1bin_lumi', {'init_pars': 2, 'par_bounds': 2}, 1.0, "q", 4e-6, "asymptotics"),
+        ("1bin_lumi", {"init_pars": 2, "par_bounds": 2}, 1.0, "q", 4e-6, "asymptotics"),
         (
-            '1bin_normsys',
-            {'init_pars': 2, 'par_bounds': 2},
+            "1bin_normsys",
+            {"init_pars": 2, "par_bounds": 2},
             1.0,
             "q",
             3e-6,
             "asymptotics",
         ),
         (
-            '2bin_histosys',
-            {'init_pars': 2, 'par_bounds': 2},
+            "2bin_histosys",
+            {"init_pars": 2, "par_bounds": 2},
             1.0,
             "q",
             8e-5,
             "asymptotics",
         ),
         (
-            '2bin_2channel',
-            {'init_pars': 5, 'par_bounds': 5},
+            "2bin_2channel",
+            {"init_pars": 5, "par_bounds": 5},
             1.0,
             "q",
             1e-6,
             "asymptotics",
         ),
        (
-            '2bin_2channel_couplednorm',
-            {'init_pars': 2, 'par_bounds': 2},
+            "2bin_2channel_couplednorm",
+            {"init_pars": 2, "par_bounds": 2},
             1.0,
             "q",
             1e-6,
             "asymptotics",
         ),
         (
-            '2bin_2channel_coupledhistosys',
-            {'auxdata': 1, 'init_pars': 2, 'par_bounds': 2},
+            "2bin_2channel_coupledhistosys",
+            {"auxdata": 1, "init_pars": 2, "par_bounds": 2},
             1.0,
             "q",
             1e-6,
             "asymptotics",
         ),
         (
-            '2bin_2channel_coupledshapefactor',
-            {'auxdata': 0, 'init_pars': 3, 'par_bounds': 3},
+            "2bin_2channel_coupledshapefactor",
+            {"auxdata": 0, "init_pars": 3, "par_bounds": 3},
             1.0,
             "q",
             2.5e-6,
@@ -751,16 +751,16 @@ def validate_hypotest(
         ),
     ],
     ids=[
-        '1bin_shapesys_mu1',
-        '1bin_shapesys_q0_mu1',
-        '1bin_shapesys_q0_mu1_toys',
-        '1bin_lumi_mu1',
-        '1bin_normsys_mu1',
-        '2bin_histosys_mu1',
-        '2bin_2channel_mu1',
-        '2bin_2channel_couplednorm_mu1',
-        '2bin_2channel_coupledhistosys_mu1',
-        '2bin_2channel_coupledshapefactor_mu1',
+        "1bin_shapesys_mu1",
+        "1bin_shapesys_q0_mu1",
+        "1bin_shapesys_q0_mu1_toys",
+        "1bin_lumi_mu1",
+        "1bin_normsys_mu1",
+        "2bin_histosys_mu1",
+        "2bin_2channel_mu1",
+        "2bin_2channel_couplednorm_mu1",
+        "2bin_2channel_coupledhistosys_mu1",
+        "2bin_2channel_coupledshapefactor_mu1",
     ],
 )
 def setup(request):
@@ -774,10 +774,10 @@ def setup(request):
     tolerance = request.param[4]
     calctype = request.param[5]
     return {
-        'source': source,
-        'spec': spec,
-        'mu': mu,
-        'expected': {'result': expected_result, 'config': config},
+        "source": source,
+        "spec": spec,
+        "mu": mu,
+        "expected": {"result": expected_result, "config": config},
         "test_stat": test_stat,
         "tolerance": tolerance,
         "calctype": calctype,
@@ -785,7 +785,7 @@ def setup(request):


 def test_validation(setup):
-    source = setup['source']
+    source = setup["source"]

     pdf = pyhf.Model(
         setup["spec"],
@@ -793,92 +793,92 @@ def test_validation(setup):
         poi_name="mu",
     )

-    if 'channels' in source:
+    if "channels" in source:
         data = []
         for c in pdf.config.channels:
-            data += source['channels'][c]['bindata']['data']
+            data += source["channels"][c]["bindata"]["data"]
         data = data + pdf.config.auxdata
     else:
-        data = source['bindata']['data'] + pdf.config.auxdata
+        data = source["bindata"]["data"] + pdf.config.auxdata

-    if 'auxdata' in setup['expected']['config']:
-        assert len(pdf.config.auxdata) == setup['expected']['config']['auxdata']
-    assert len(pdf.config.suggested_init()) == setup['expected']['config']['init_pars']
+    if "auxdata" in setup["expected"]["config"]:
+        assert len(pdf.config.auxdata) == setup["expected"]["config"]["auxdata"]
+    assert len(pdf.config.suggested_init()) == setup["expected"]["config"]["init_pars"]
     assert (
-        len(pdf.config.suggested_bounds()) == setup['expected']['config']['par_bounds']
+        len(pdf.config.suggested_bounds()) == setup["expected"]["config"]["par_bounds"]
     )

     validate_hypotest(
         pdf,
         data,
-        setup['mu'],
-        setup['expected']['result'],
-        test_stat=setup['test_stat'],
-        tolerance=setup['tolerance'],
-        calctype=setup['calctype'],
+        setup["mu"],
+        setup["expected"]["result"],
+        test_stat=setup["test_stat"],
+        tolerance=setup["tolerance"],
+        calctype=setup["calctype"],
     )


 @pytest.mark.parametrize(
-    'top_level, base_dir',
+    "top_level, base_dir",
     [
         (
-            'validation/xmlimport_input/config/example.xml',
-            'validation/xmlimport_input/',
+            "validation/xmlimport_input/config/example.xml",
+            "validation/xmlimport_input/",
         ),
         (
-            'validation/xmlimport_input2/config/example.xml',
-            'validation/xmlimport_input2',
+            "validation/xmlimport_input2/config/example.xml",
+            "validation/xmlimport_input2",
         ),
         (
-            'validation/xmlimport_input3/config/examples/example_ShapeSys.xml',
-            'validation/xmlimport_input3',
+            "validation/xmlimport_input3/config/examples/example_ShapeSys.xml",
+            "validation/xmlimport_input3",
         ),
     ],
-    ids=['example-one', 'example-two', 'example-three'],
+    ids=["example-one", "example-two", "example-three"],
 )
 def test_import_roundtrip(tmp_path, top_level, base_dir):
     parsed_xml_before = pyhf.readxml.parse(top_level, base_dir)
     spec = {
-        'channels': parsed_xml_before['channels'],
-        'parameters': parsed_xml_before['measurements'][0]['config']['parameters'],
+        "channels": parsed_xml_before["channels"],
+        "parameters": parsed_xml_before["measurements"][0]["config"]["parameters"],
     }
-    pdf_before = pyhf.Model(spec, poi_name='SigXsecOverSM')
+    pdf_before = pyhf.Model(spec, poi_name="SigXsecOverSM")

-    tmp_config = tmp_path.joinpath('config')
+    tmp_config = tmp_path.joinpath("config")
     tmp_config.mkdir()
-    tmp_data = tmp_path.joinpath('data')
+    tmp_data = tmp_path.joinpath("data")
     tmp_data.mkdir()
-    tmp_xml = tmp_path.joinpath('FitConfig.xml')
+    tmp_xml = tmp_path.joinpath("FitConfig.xml")
     tmp_xml.write_text(
         pyhf.writexml.writexml(
             parsed_xml_before,
             tmp_config,
             tmp_data,
-            tmp_path.joinpath('FitConfig'),
-        ).decode('utf-8')
+            tmp_path.joinpath("FitConfig"),
+        ).decode("utf-8")
     )

     parsed_xml_after = pyhf.readxml.parse(tmp_xml, tmp_path)
     spec = {
-        'channels': parsed_xml_after['channels'],
-        'parameters': parsed_xml_after['measurements'][0]['config']['parameters'],
+        "channels": parsed_xml_after["channels"],
+        "parameters": parsed_xml_after["measurements"][0]["config"]["parameters"],
     }
-    pdf_after = pyhf.Model(spec, poi_name='SigXsecOverSM')
+    pdf_after = pyhf.Model(spec, poi_name="SigXsecOverSM")

     data_before = [
         binvalue
         for k in pdf_before.config.channels
         for binvalue in next(
-            obs for obs in parsed_xml_before['observations'] if obs['name'] == k
-        )['data']
+            obs for obs in parsed_xml_before["observations"] if obs["name"] == k
+        )["data"]
     ] + pdf_before.config.auxdata

     data_after = [
         binvalue
         for k in pdf_after.config.channels
         for binvalue in next(
-            obs for obs in parsed_xml_after['observations'] if obs['name'] == k
-        )['data']
+            obs for obs in parsed_xml_after["observations"] if obs["name"] == k
+        )["data"]
     ] + pdf_after.config.auxdata

     assert data_before == data_after
@@ -975,32 +975,32 @@ def test_shapesys_nuisparfilter_validation():
     ws = pyhf.Workspace(spec)
     model = ws.model(
         modifier_settings={
-            'normsys': {'interpcode': 'code4'},
-            'histosys': {'interpcode': 'code4p'},
+            "normsys": {"interpcode": "code4"},
+            "histosys": {"interpcode": "code4p"},
         },
     )
     data = ws.data(model)
     obs, exp = pyhf.infer.hypotest(1.0, data, model, return_expected_set=True)
-    pyhf_results = {'CLs_obs': obs, 'CLs_exp': [e for e in exp]}
+    pyhf_results = {"CLs_obs": obs, "CLs_exp": [e for e in exp]}

     assert np.allclose(
-        reference_root_results['CLs_obs'], pyhf_results['CLs_obs'], atol=1e-4, rtol=1e-5
+        reference_root_results["CLs_obs"], pyhf_results["CLs_obs"], atol=1e-4, rtol=1e-5
     )
     assert np.allclose(
-        reference_root_results['CLs_exp'], pyhf_results['CLs_exp'], atol=1e-4, rtol=1e-5
+        reference_root_results["CLs_exp"], pyhf_results["CLs_exp"], atol=1e-4, rtol=1e-5
     )


 @pytest.mark.parametrize(
-    'backend',
+    "backend",
     [
         pyhf.tensor.numpy_backend,
         pyhf.tensor.jax_backend,
     ],
 )
-@pytest.mark.parametrize('optimizer', ['scipy', 'minuit'])
+@pytest.mark.parametrize("optimizer", ["scipy", "minuit"])
 def test_optimizer_stitching(backend, optimizer):
-    pyhf.set_backend(backend(precision='64b'), optimizer)
+    pyhf.set_backend(backend(precision="64b"), optimizer)

     pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10])
     data = [125.0] + pdf.config.auxdata
@@ -1016,14 +1016,14 @@ def test_optimizer_stitching(backend, optimizer):


 @pytest.mark.parametrize(
-    'backend',
+    "backend",
     [
         pyhf.tensor.jax_backend,
     ],
 )
-@pytest.mark.parametrize('optimizer,rtol', [('scipy', 1e-6), ('minuit', 1e-3)])
+@pytest.mark.parametrize("optimizer,rtol", [("scipy", 1e-6), ("minuit", 1e-3)])
 def test_optimizer_grad(backend, optimizer, rtol):
-    pyhf.set_backend(backend(precision='64b'), optimizer)
+    pyhf.set_backend(backend(precision="64b"), optimizer)

     pdf = pyhf.simplemodels.uncorrelated_background([50.0], [100.0], [10])
     data = [125.0] + pdf.config.auxdata
diff --git a/tests/test_workspace.py b/tests/test_workspace.py
index 3a4cdb684e..fbd6560b7a 100644
--- a/tests/test_workspace.py
+++ b/tests/test_workspace.py
@@ -13,28 +13,28 @@


 @pytest.fixture(
-    scope='session',
+    scope="session",
     params=[
         (
-            'validation/xmlimport_input/config/example.xml',
-            'validation/xmlimport_input/',
+            "validation/xmlimport_input/config/example.xml",
+            "validation/xmlimport_input/",
         ),
         (
-            'validation/xmlimport_input2/config/example.xml',
-            'validation/xmlimport_input2',
+            "validation/xmlimport_input2/config/example.xml",
+            "validation/xmlimport_input2",
         ),
         (
-            'validation/xmlimport_input3/config/examples/example_ShapeSys.xml',
-            'validation/xmlimport_input3',
+            "validation/xmlimport_input3/config/examples/example_ShapeSys.xml",
+            "validation/xmlimport_input3",
         ),
     ],
-    ids=['example-one', 'example-two', 'example-three'],
+    ids=["example-one", "example-two", "example-three"],
 )
 def workspace_xml(request):
     return pyhf.readxml.parse(*request.param)


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def workspace_factory(workspace_xml):
     return lambda: pyhf.Workspace(workspace_xml)
@@ -73,10 +73,10 @@ def test_get_measurement(workspace_factory):
     w = workspace_factory()
     for measurement in w.measurement_names:
         m = w.get_measurement(measurement_name=measurement)
-        assert m['name'] == measurement
+        assert m["name"] == measurement
     for measurement_idx in range(len(w.measurement_names)):
         m = w.get_measurement(measurement_index=measurement_idx)
-        assert m['name'] == w.measurement_names[measurement_idx]
+        assert m["name"] == w.measurement_names[measurement_idx]


 def test_get_measurement_nonexist(workspace_factory):
@@ -84,7 +84,7 @@ def test_get_measurement_nonexist(workspace_factory):
     with pytest.raises(
         pyhf.exceptions.InvalidMeasurement, match="nonexistent_measurement"
     ):
-        w.get_measurement(measurement_name='nonexistent_measurement')
+        w.get_measurement(measurement_name="nonexistent_measurement")


 def test_get_measurement_index_outofbounds(workspace_factory):
@@ -109,26 +109,26 @@ def test_get_workspace_measurement_priority(workspace_factory):
     m = w.get_measurement(
         measurement_name=w.measurement_names[0], measurement_index=999
     )
-    assert m['name'] == w.measurement_names[0]
+    assert m["name"] == w.measurement_names[0]
     # only in cases where we have more than one measurement to pick from
     if len(w.measurement_names) > 1:
-        assert m['name'] != w.measurement_names[-1]
+        assert m["name"] != w.measurement_names[-1]


 def test_get_measurement_schema_validation(mocker, workspace_factory):
-    mocker.patch('pyhf.schema.validate', return_value=None)
+    mocker.patch("pyhf.schema.validate", return_value=None)
     assert pyhf.schema.validate.called is False
     w = workspace_factory()
     assert pyhf.schema.validate.call_count == 1
-    assert pyhf.schema.validate.call_args[0][1] == 'workspace.json'
+    assert pyhf.schema.validate.call_args[0][1] == "workspace.json"
     w.get_measurement()
     assert pyhf.schema.validate.call_count == 2
-    assert pyhf.schema.validate.call_args[0][1] == 'measurement.json'
+    assert pyhf.schema.validate.call_args[0][1] == "measurement.json"


 def test_get_workspace_repr(workspace_factory):
     w = workspace_factory()
-    assert 'pyhf.workspace.Workspace' in str(w)
+    assert "pyhf.workspace.Workspace" in str(w)


 def test_get_workspace_model_default(workspace_factory):
@@ -147,15 +147,15 @@ def test_get_workspace_model_nopoi(workspace_factory):

 def test_get_workspace_model_overridepoi(workspace_factory):
     w = workspace_factory()
-    m = w.model(poi_name='lumi')
+    m = w.model(poi_name="lumi")

-    assert m.config.poi_name == 'lumi'
+    assert m.config.poi_name == "lumi"


 def test_get_workspace_model_fakepoi(workspace_factory):
     w = workspace_factory()
     with pytest.raises(pyhf.exceptions.InvalidModel):
-        w.model(poi_name='afakepoi')
+        w.model(poi_name="afakepoi")


 def test_workspace_observations(workspace_factory):
@@ -183,7 +183,7 @@ def test_get_workspace_data_bad_model(workspace_factory, caplog, mocker):
         new_callable=mocker.PropertyMock,
         return_value=["fakechannel"],
     )
-    with caplog.at_level(logging.INFO, 'pyhf.pdf'):
+    with caplog.at_level(logging.INFO, "pyhf.pdf"):
         with pytest.raises(KeyError):
             assert w.data(m)
             assert "Invalid channel" in caplog.text
@@ -196,10 +196,10 @@ def test_json_serializable(workspace_factory):
 @pytest.mark.parametrize(
     "kwargs",
     [
-        dict(channels=['fake-name']),
-        dict(samples=['fake-sample']),
-        dict(modifiers=['fake-modifier']),
-        dict(modifier_types=['fake-type']),
+        dict(channels=["fake-name"]),
+        dict(samples=["fake-sample"]),
+        dict(modifiers=["fake-modifier"]),
+        dict(modifier_types=["fake-type"]),
     ],
 )
 def test_prune_error(workspace_factory, kwargs):
@@ -220,7 +220,7 @@ def test_prune_channel(workspace_factory):
     else:
         new_ws = ws.prune(channels=[channel])
     assert channel not in new_ws.channels
-    assert channel not in [obs['name'] for obs in new_ws['observations']]
+    assert channel not in [obs["name"] for obs in new_ws["observations"]]


 def test_prune_sample(workspace_factory):
@@ -235,22 +235,22 @@ def test_prune_sample(workspace_factory):

 def test_prune_modifier(workspace_factory):
     ws = workspace_factory()
-    modifier = 'lumi'
+    modifier = "lumi"
     with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation):
         ws.prune(modifiers=modifier)

     new_ws = ws.prune(modifiers=[modifier])
     assert modifier not in new_ws.model().config.parameters
     assert modifier not in [
-        p['name']
-        for measurement in new_ws['measurements']
-        for p in measurement['config']['parameters']
+        p["name"]
+        for measurement in new_ws["measurements"]
+        for p in measurement["config"]["parameters"]
     ]


 def test_prune_modifier_type(workspace_factory):
     ws = workspace_factory()
-    modifier_type = 'lumi'
+    modifier_type = "lumi"
     with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation):
         ws.prune(modifier_types=modifier_type)
@@ -280,19 +280,19 @@ def test_prune_measurements(workspace_factory):

 def test_rename_channel(workspace_factory):
     ws = workspace_factory()
     channel = ws.channels[0]
-    renamed = 'renamedChannel'
+    renamed = "renamedChannel"
     assert renamed not in ws.channels
     new_ws = ws.rename(channels={channel: renamed})
     assert channel not in new_ws.channels
     assert renamed in new_ws.channels
-    assert channel not in [obs['name'] for obs in new_ws['observations']]
-    assert renamed in [obs['name'] for obs in new_ws['observations']]
+    assert channel not in [obs["name"] for obs in new_ws["observations"]]
+    assert renamed in [obs["name"] for obs in new_ws["observations"]]


 def test_rename_sample(workspace_factory):
     ws = workspace_factory()
     sample = ws.samples[1]
-    renamed = 'renamedSample'
+    renamed = "renamedSample"
     assert renamed not in ws.samples
     new_ws = ws.rename(samples={sample: renamed})
     assert sample not in new_ws.samples
@@ -302,7 +302,7 @@ def test_rename_sample(workspace_factory):
 def test_rename_modifier(workspace_factory):
     ws = workspace_factory()
     modifier = ws.model().config.parameters[0]
-    renamed = 'renamedModifier'
+    renamed = "renamedModifier"
     assert renamed not in ws.model().config.parameters
     new_ws = ws.rename(modifiers={modifier: renamed})
     assert modifier not in new_ws.model().config.parameters
@@ -311,48 +311,48 @@ def test_rename_modifier(workspace_factory):

 def test_rename_poi(workspace_factory):
     ws = workspace_factory()
-    poi = ws.get_measurement()['config']['poi']
-    renamed = 'renamedPoi'
+    poi = ws.get_measurement()["config"]["poi"]
+    renamed = "renamedPoi"
     assert renamed not in ws.model().config.parameters
     new_ws = ws.rename(modifiers={poi: renamed})
     assert poi not in new_ws.model().config.parameters
     assert renamed in new_ws.model().config.parameters
-    assert new_ws.get_measurement()['config']['poi'] == renamed
+    assert new_ws.get_measurement()["config"]["poi"] == renamed


 def test_rename_measurement(workspace_factory):
     ws = workspace_factory()
     measurement = ws.measurement_names[0]
-    renamed = 'renamedMeasurement'
+    renamed = "renamedMeasurement"
     assert renamed not in ws.measurement_names
     new_ws = ws.rename(measurements={measurement: renamed})
     assert measurement not in new_ws.measurement_names
     assert renamed in new_ws.measurement_names


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def join_items():
     left = [
-        {'name': 'left', 'key': 'value', 'deep': [{'name': 1}]},
-        {'name': 'common', 'key': 'left', 'deep': [{'name': 1}]},
+        {"name": "left", "key": "value", "deep": [{"name": 1}]},
+
{"name": "common", "key": "left", "deep": [{"name": 1}]}, ] right = [ - {'name': 'right', 'key': 'value', 'deep': [{'name': 2}]}, - {'name': 'common', 'key': 'right', 'deep': [{'name': 2}]}, + {"name": "right", "key": "value", "deep": [{"name": 2}]}, + {"name": "common", "key": "right", "deep": [{"name": 2}]}, ] return (left, right) def test_join_items_none(join_items): left_items, right_items = join_items - joined = pyhf.workspace._join_items('none', left_items, right_items, key='name') + joined = pyhf.workspace._join_items("none", left_items, right_items, key="name") assert all(left in joined for left in left_items) assert all(right in joined for right in right_items) def test_join_items_outer(join_items): left_items, right_items = join_items - joined = pyhf.workspace._join_items('outer', left_items, right_items, key='name') + joined = pyhf.workspace._join_items("outer", left_items, right_items, key="name") assert all(left in joined for left in left_items) assert all(right in joined for right in right_items) @@ -360,7 +360,7 @@ def test_join_items_outer(join_items): def test_join_items_left_outer(join_items): left_items, right_items = join_items joined = pyhf.workspace._join_items( - 'left outer', left_items, right_items, key='name' + "left outer", left_items, right_items, key="name" ) assert all(left in joined for left in left_items) assert not all(right in joined for right in right_items) @@ -369,7 +369,7 @@ def test_join_items_left_outer(join_items): def test_join_items_right_outer(join_items): left_items, right_items = join_items joined = pyhf.workspace._join_items( - 'right outer', left_items, right_items, key='name' + "right outer", left_items, right_items, key="name" ) assert not all(left in joined for left in left_items) assert all(right in joined for right in right_items) @@ -378,96 +378,96 @@ def test_join_items_right_outer(join_items): def test_join_items_outer_deep(join_items): left_items, right_items = join_items joined = pyhf.workspace._join_items( - 'outer', left_items, right_items, key='name', deep_merge_key='deep' + "outer", left_items, right_items, key="name", deep_merge_key="deep" ) - assert next(k['deep'] for k in joined if k['name'] == 'common') == [ - {'name': 1}, - {'name': 2}, + assert next(k["deep"] for k in joined if k["name"] == "common") == [ + {"name": 1}, + {"name": 2}, ] def test_join_items_left_outer_deep(join_items): left_items, right_items = join_items joined = pyhf.workspace._join_items( - 'left outer', left_items, right_items, key='name', deep_merge_key='deep' + "left outer", left_items, right_items, key="name", deep_merge_key="deep" ) - assert next(k['deep'] for k in joined if k['name'] == 'common') == [ - {'name': 1}, - {'name': 2}, + assert next(k["deep"] for k in joined if k["name"] == "common") == [ + {"name": 1}, + {"name": 2}, ] def test_join_items_right_outer_deep(join_items): left_items, right_items = join_items joined = pyhf.workspace._join_items( - 'right outer', left_items, right_items, key='name', deep_merge_key='deep' + "right outer", left_items, right_items, key="name", deep_merge_key="deep" ) - assert next(k['deep'] for k in joined if k['name'] == 'common') == [ - {'name': 2}, - {'name': 1}, + assert next(k["deep"] for k in joined if k["name"] == "common") == [ + {"name": 2}, + {"name": 1}, ] -@pytest.mark.parametrize("join", ['none', 'outer']) +@pytest.mark.parametrize("join", ["none", "outer"]) def test_combine_workspace_same_channels_incompatible_structure( workspace_factory, join ): ws = workspace_factory() new_ws = ws.rename( - 
samples={ws.samples[0]: 'sample_other'}, + samples={ws.samples[0]: "sample_other"}, ) with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation, match="channel1"): pyhf.Workspace.combine(ws, new_ws, join=join) -@pytest.mark.parametrize("join", ['outer', 'left outer', 'right outer']) +@pytest.mark.parametrize("join", ["outer", "left outer", "right outer"]) def test_combine_workspace_same_channels_outer_join(workspace_factory, join): ws = workspace_factory() - new_ws = ws.rename(channels={ws.channels[-1]: 'new_channel'}) + new_ws = ws.rename(channels={ws.channels[-1]: "new_channel"}) combined = pyhf.Workspace.combine(ws, new_ws, join=join) assert all(channel in combined.channels for channel in ws.channels) assert all(channel in combined.channels for channel in new_ws.channels) -@pytest.mark.parametrize("join", ['left outer', 'right outer']) +@pytest.mark.parametrize("join", ["left outer", "right outer"]) def test_combine_workspace_same_channels_outer_join_unsafe( workspace_factory, join, caplog ): ws = workspace_factory() - new_ws = ws.rename(channels={ws.channels[-1]: 'new_channel'}) + new_ws = ws.rename(channels={ws.channels[-1]: "new_channel"}) pyhf.Workspace.combine(ws, new_ws, join=join) - assert 'using an unsafe join operation' in caplog.text + assert "using an unsafe join operation" in caplog.text -@pytest.mark.parametrize("join", ['none', 'outer']) +@pytest.mark.parametrize("join", ["none", "outer"]) def test_combine_workspace_incompatible_poi(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - modifiers={ws.get_measurement()['config']['poi']: 'renamedPOI'}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + modifiers={ws.get_measurement()["config"]["poi"]: "renamedPOI"}, ) with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation, match="GaussExample"): pyhf.Workspace.combine(ws, new_ws, join=join) -@pytest.mark.parametrize("join", ['none', 'outer', 'left outer', 'right outer']) +@pytest.mark.parametrize("join", ["none", "outer", "left outer", "right outer"]) def test_combine_workspace_diff_version(workspace_factory, join): ws = workspace_factory() - ws.version = '1.0.0' + ws.version = "1.0.0" new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) - new_ws['version'] = '1.2.0' + new_ws["version"] = "1.2.0" with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation) as exc_info: pyhf.Workspace.combine(ws, new_ws, join=join) # Using asserts with str(exc_info.value) over pytest.raises(..., match="...") @@ -476,64 +476,64 @@ def test_combine_workspace_diff_version(workspace_factory, join): assert "1.2.0" in str(exc_info.value) -@pytest.mark.parametrize("join", ['none']) +@pytest.mark.parametrize("join", ["none"]) def test_combine_workspace_duplicate_parameter_configs(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: 
f"renamed_{channel}" for channel in ws.channels}, ) with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation, match="GaussExample"): pyhf.Workspace.combine(ws, new_ws, join=join) -@pytest.mark.parametrize("join", ['outer', 'left outer', 'right outer']) +@pytest.mark.parametrize("join", ["outer", "left outer", "right outer"]) def test_combine_workspace_duplicate_parameter_configs_outer_join( workspace_factory, join ): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) combined = pyhf.Workspace.combine(ws, new_ws, join=join) - poi = ws.get_measurement(measurement_name='GaussExample')['config']['poi'] + poi = ws.get_measurement(measurement_name="GaussExample")["config"]["poi"] ws_parameter_configs = [ - parameter['name'] - for parameter in ws.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + parameter["name"] + for parameter in ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ] ] new_ws_parameter_configs = [ - parameter['name'] - for parameter in new_ws.get_measurement(measurement_name='GaussExample')[ - 'config' - ]['parameters'] + parameter["name"] + for parameter in new_ws.get_measurement(measurement_name="GaussExample")[ + "config" + ]["parameters"] ] combined_parameter_configs = [ - parameter['name'] - for parameter in combined.get_measurement(measurement_name='GaussExample')[ - 'config' - ]['parameters'] + parameter["name"] + for parameter in combined.get_measurement(measurement_name="GaussExample")[ + "config" + ]["parameters"] ] assert poi in ws_parameter_configs assert poi in new_ws_parameter_configs assert poi in combined_parameter_configs - assert 'lumi' in ws_parameter_configs - assert 'lumi' in new_ws_parameter_configs - assert 'lumi' in combined_parameter_configs + assert "lumi" in ws_parameter_configs + assert "lumi" in new_ws_parameter_configs + assert "lumi" in combined_parameter_configs assert len(combined_parameter_configs) == len(set(combined_parameter_configs)) def test_combine_workspace_parameter_configs_ordering(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) assert ( - ws.get_measurement(measurement_name='GaussExample')['config']['parameters'] - == new_ws.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"] + == new_ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ] ) @@ -541,74 +541,74 @@ def test_combine_workspace_parameter_configs_ordering(workspace_factory): def test_combine_workspace_observation_ordering(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - assert ws['observations'][0]['data'] == new_ws['observations'][0]['data'] + assert ws["observations"][0]["data"] == new_ws["observations"][0]["data"] def test_combine_workspace_deepcopied(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - new_ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ 
- 'bounds' + new_ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] = [[0.0, 1.0]] - new_ws['observations'][0]['data'][0] = -10.0 + new_ws["observations"][0]["data"][0] = -10.0 assert ( - ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ - 'bounds' + ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] - != new_ws.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' - ][0]['bounds'] + != new_ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" + ][0]["bounds"] ) - assert ws['observations'][0]['data'] != new_ws['observations'][0]['data'] + assert ws["observations"][0]["data"] != new_ws["observations"][0]["data"] -@pytest.mark.parametrize("join", ['fake join operation']) +@pytest.mark.parametrize("join", ["fake join operation"]) def test_combine_workspace_invalid_join_operation(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) with pytest.raises(ValueError, match=join): pyhf.Workspace.combine(ws, new_ws, join=join) -@pytest.mark.parametrize("join", ['none']) +@pytest.mark.parametrize("join", ["none"]) def test_combine_workspace_invalid_join_operation_merge(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) with pytest.raises(ValueError, match=join): pyhf.Workspace.combine(ws, new_ws, join=join, merge_channels=True) -@pytest.mark.parametrize("join", ['none']) +@pytest.mark.parametrize("join", ["none"]) def test_combine_workspace_incompatible_parameter_configs(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - new_ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ - 'bounds' + new_ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] = [[0.0, 1.0]] with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation, match="GaussExample"): pyhf.Workspace.combine(ws, new_ws, join=join) -@pytest.mark.parametrize("join", ['outer']) +@pytest.mark.parametrize("join", ["outer"]) def test_combine_workspace_incompatible_parameter_configs_outer_join( workspace_factory, join ): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - new_ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ - 'bounds' + new_ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] = [[0.0, 1.0]] with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation) as exc_info: pyhf.Workspace.combine(ws, new_ws, join=join) @@ -623,31 +623,31 @@ def test_combine_workspace_incompatible_parameter_configs_outer_join( ][0]["name"] in str(exc_info.value) -@pytest.mark.parametrize("join", ['outer']) +@pytest.mark.parametrize("join", ["outer"]) def test_combine_workspace_compatible_parameter_configs_outer_join( workspace_factory, join ): ws = workspace_factory() - left_parameters = ws.get_measurement(measurement_name='GaussExample')['config'][ - 
'parameters' + left_parameters = ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ] - right_parameters = ws.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + right_parameters = ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ] assert pyhf.workspace._join_parameter_configs( - 'GaussExample', left_parameters, right_parameters + "GaussExample", left_parameters, right_parameters ) assert pyhf.workspace._join_measurements( - join, ws['measurements'], ws['measurements'] + join, ws["measurements"], ws["measurements"] ) -@pytest.mark.parametrize("join", ['outer']) +@pytest.mark.parametrize("join", ["outer"]) def test_combine_workspace_measurements_outer_join(workspace_factory, join): ws = workspace_factory() - left_measurements = ws['measurements'] - right_measurements = copy.deepcopy(ws['measurements']) - right_measurements[0]['config']['parameters'][0]['name'] = 'fake' + left_measurements = ws["measurements"] + right_measurements = copy.deepcopy(ws["measurements"]) + right_measurements[0]["config"]["parameters"][0]["name"] = "fake" assert pyhf.workspace._join_measurements( join, left_measurements, right_measurements ) @@ -658,17 +658,17 @@ def test_combine_workspace_incompatible_parameter_configs_left_outer_join( ): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - new_ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ - 'bounds' + new_ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] = [[0.0, 1.0]] - combined = pyhf.Workspace.combine(ws, new_ws, join='left outer') + combined = pyhf.Workspace.combine(ws, new_ws, join="left outer") assert ( - combined.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + combined.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ][0] - == ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][ + == ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][ 0 ] ) @@ -679,40 +679,40 @@ def test_combine_workspace_incompatible_parameter_configs_right_outer_join( ): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, ) - new_ws.get_measurement(measurement_name='GaussExample')['config']['parameters'][0][ - 'bounds' + new_ws.get_measurement(measurement_name="GaussExample")["config"]["parameters"][0][ + "bounds" ] = [[0.0, 1.0]] - combined = pyhf.Workspace.combine(ws, new_ws, join='right outer') + combined = pyhf.Workspace.combine(ws, new_ws, join="right outer") assert ( - combined.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + combined.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ][0] - == new_ws.get_measurement(measurement_name='GaussExample')['config'][ - 'parameters' + == new_ws.get_measurement(measurement_name="GaussExample")["config"][ + "parameters" ][0] ) -@pytest.mark.parametrize("join", ['none', 'outer']) +@pytest.mark.parametrize("join", ["none", "outer"]) def test_combine_workspace_incompatible_observations(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for 
sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) - new_ws['observations'][0]['name'] = ws['observations'][0]['name'] - new_ws['observations'][0]['data'][0] = -10.0 + new_ws["observations"][0]["name"] = ws["observations"][0]["name"] + new_ws["observations"][0]["data"][0] = -10.0 with pytest.raises(pyhf.exceptions.InvalidWorkspaceOperation) as exc_info: pyhf.Workspace.combine(ws, new_ws, join=join) # Using asserts with str(exc_info.value) over pytest.raises(..., match="...") @@ -724,48 +724,48 @@ def test_combine_workspace_incompatible_observations(workspace_factory, join): def test_combine_workspace_incompatible_observations_left_outer(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) - new_ws['observations'][0]['name'] = ws['observations'][0]['name'] - new_ws['observations'][0]['data'][0] = -10.0 - combined = pyhf.Workspace.combine(ws, new_ws, join='left outer') + new_ws["observations"][0]["name"] = ws["observations"][0]["name"] + new_ws["observations"][0]["data"][0] = -10.0 + combined = pyhf.Workspace.combine(ws, new_ws, join="left outer") assert ( - combined.observations[ws['observations'][0]['name']] - == ws['observations'][0]['data'] + combined.observations[ws["observations"][0]["name"]] + == ws["observations"][0]["data"] ) def test_combine_workspace_incompatible_observations_right_outer(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) - new_ws['observations'][0]['name'] = ws['observations'][0]['name'] - new_ws['observations'][0]['data'][0] = -10.0 - combined = pyhf.Workspace.combine(ws, new_ws, join='right outer') + new_ws["observations"][0]["name"] = ws["observations"][0]["name"] + new_ws["observations"][0]["data"][0] = -10.0 + combined = pyhf.Workspace.combine(ws, new_ws, join="right outer") assert ( - combined.observations[ws['observations'][0]['name']] - == new_ws['observations'][0]['data'] + combined.observations[ws["observations"][0]["name"]] + == new_ws["observations"][0]["data"] ) @@ 
-773,15 +773,15 @@ def test_combine_workspace_incompatible_observations_right_outer(workspace_facto def test_combine_workspace(workspace_factory, join): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) @@ -820,21 +820,21 @@ def test_workspace_equality(workspace_factory): ws_other = workspace_factory() assert ws == ws assert ws == ws_other - assert ws != 'not a workspace' + assert ws != "not a workspace" def test_workspace_inheritance(workspace_factory): ws = workspace_factory() new_ws = ws.rename( - channels={channel: f'renamed_{channel}' for channel in ws.channels}, - samples={sample: f'renamed_{sample}' for sample in ws.samples}, + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, modifiers={ - modifier: f'renamed_{modifier}' + modifier: f"renamed_{modifier}" for modifier, _ in ws.modifiers - if not modifier == 'lumi' + if not modifier == "lumi" }, measurements={ - measurement: f'renamed_{measurement}' + measurement: f"renamed_{measurement}" for measurement in ws.measurement_names }, ) @@ -846,33 +846,33 @@ class FooWorkspace(pyhf.Workspace): assert isinstance(combined, FooWorkspace) -@pytest.mark.parametrize("join", ['outer', 'left outer', 'right outer']) +@pytest.mark.parametrize("join", ["outer", "left outer", "right outer"]) def test_combine_workspace_merge_channels(workspace_factory, join): ws = workspace_factory() new_ws = ws.prune(samples=ws.samples[1:]).rename( - samples={ws.samples[0]: f'renamed_{ws.samples[0]}'} + samples={ws.samples[0]: f"renamed_{ws.samples[0]}"} ) combined_ws = pyhf.Workspace.combine(ws, new_ws, join=join, merge_channels=True) assert new_ws.samples[0] in combined_ws.samples assert any( - sample['name'] == new_ws.samples[0] - for sample in combined_ws['channels'][0]['samples'] + sample["name"] == new_ws.samples[0] + for sample in combined_ws["channels"][0]["samples"] ) def test_sorted(workspace_factory): ws = workspace_factory() # force the first sample in each channel to be last - for channel in ws['channels']: - channel['samples'][0]['name'] = 'zzzzlast' + for channel in ws["channels"]: + channel["samples"][0]["name"] = "zzzzlast" new_ws = pyhf.Workspace.sorted(ws) - for channel in ws['channels']: + for channel in ws["channels"]: # check no sort - assert channel['samples'][0]['name'] == 'zzzzlast' - for channel in new_ws['channels']: + assert channel["samples"][0]["name"] == "zzzzlast" + for channel in new_ws["channels"]: # check sort - assert channel['samples'][-1]['name'] == 'zzzzlast' + assert channel["samples"][-1]["name"] == "zzzzlast" def test_closure_over_workspace_build(simplemodels_model_data): @@ -935,7 +935,7 @@ def test_wspace_unexpected_keyword_argument(simplemodels_model_data): def test_workspace_without_validation(mocker, simplemodels_model_data): model, data = simplemodels_model_data - mocker.patch('pyhf.schema.validate') + mocker.patch("pyhf.schema.validate") ws = 
pyhf.Workspace.build(model, data, validate=False) assert pyhf.schema.validate.called is False diff --git a/validation/manualonoff_roofit/onoff.py b/validation/manualonoff_roofit/onoff.py index e30a665d34..cf83d80cc7 100644 --- a/validation/manualonoff_roofit/onoff.py +++ b/validation/manualonoff_roofit/onoff.py @@ -4,16 +4,16 @@ with open("data/source.json", encoding="utf-8") as source_file: d = json.load(source_file) -nobs = d['bindata']['data'][0] -b = d['bindata']['bkg'][0] -deltab = d['bindata']['bkgerr'][0] -s = d['bindata']['sig'][0] +nobs = d["bindata"]["data"][0] +b = d["bindata"]["bkg"][0] +deltab = d["bindata"]["bkgerr"][0] +s = d["bindata"]["sig"][0] # derived data tau = b / deltab / deltab mobs = round(tau * b) -print(f'tau: {tau}, m: {mobs}') +print(f"tau: {tau}, m: {mobs}") w = ROOT.RooWorkspace("w", True) @@ -25,38 +25,38 @@ # ----------------- -w.var('s').setVal(s) -w.var('b').setVal(b) +w.var("s").setVal(s) +w.var("b").setVal(b) -w.var('s').setConstant(True) -w.var('nobs_sr').setVal(nobs) +w.var("s").setConstant(True) +w.var("nobs_sr").setVal(nobs) w.factory("prod:nexp_cr(tau[1],b)") w.factory("Poisson:off_model(nobs_cr[0,1000],nexp_cr)") -w.var('nobs_cr').setVal(mobs) -w.var('nobs_cr').setConstant(True) -w.var('tau').setVal(tau) -w.var('tau').setConstant(True) +w.var("nobs_cr").setVal(mobs) +w.var("nobs_cr").setConstant(True) +w.var("tau").setVal(tau) +w.var("tau").setConstant(True) w.factory("PROD:onoff(on_model,off_model)") data = ROOT.RooDataSet( - 'data', 'data', ROOT.RooArgSet(w.var('nobs_sr'), w.var('nobs_cr')) + "data", "data", ROOT.RooArgSet(w.var("nobs_sr"), w.var("nobs_cr")) ) -data.add(ROOT.RooArgSet(w.var('nobs_sr'), w.var('nobs_cr'))) +data.add(ROOT.RooArgSet(w.var("nobs_sr"), w.var("nobs_cr"))) -getattr(w, 'import')(data) +getattr(w, "import")(data) modelConfig = ROOT.RooStats.ModelConfig(w) -modelConfig.SetPdf(w.pdf('onoff')) -modelConfig.SetParametersOfInterest(ROOT.RooArgSet(w.var('mu'))) -modelConfig.SetNuisanceParameters(ROOT.RooArgSet(w.var('b'))) -modelConfig.SetObservables(ROOT.RooArgSet(w.var('nobs_sr'), w.var('nobs_cr'))) +modelConfig.SetPdf(w.pdf("onoff")) +modelConfig.SetParametersOfInterest(ROOT.RooArgSet(w.var("mu"))) +modelConfig.SetNuisanceParameters(ROOT.RooArgSet(w.var("b"))) +modelConfig.SetObservables(ROOT.RooArgSet(w.var("nobs_sr"), w.var("nobs_cr"))) modelConfig.SetGlobalObservables(ROOT.RooArgSet()) modelConfig.SetName("ModelConfig") -getattr(w, 'import')(modelConfig) +getattr(w, "import")(modelConfig) w.Print() @@ -64,7 +64,7 @@ # model building complete -sbModel = w.obj('ModelConfig') +sbModel = w.obj("ModelConfig") poi = sbModel.GetParametersOfInterest().first() sbModel.SetSnapshot(ROOT.RooArgSet(poi)) @@ -93,8 +93,8 @@ c.SetLogy(False) plot.Draw("OBS EXP CLb 2CL") c.Draw() -c.SaveAs('scan.pdf') +c.SaveAs("scan.pdf") -print(f'observed: {result.UpperLimit()}') +print(f"observed: {result.UpperLimit()}") for i in [-2, -1, 0, 1, 2]: - print(f'expected {i}: {result.GetExpectedUpperLimit(i)}') + print(f"expected {i}: {result.GetExpectedUpperLimit(i)}") diff --git a/validation/multichan_coupledhistosys_histfactory/makedata.py b/validation/multichan_coupledhistosys_histfactory/makedata.py index fdabc01721..6f7209b035 100644 --- a/validation/multichan_coupledhistosys_histfactory/makedata.py +++ b/validation/multichan_coupledhistosys_histfactory/makedata.py @@ -7,18 +7,18 @@ source_data = json.load(source_file) root_file = sys.argv[2] -f = ROOT.TFile(root_file, 'RECREATE') +f = ROOT.TFile(root_file, "RECREATE") hists = [] -for cname, 
channel_def in source_data['channels'].iteritems(): - print('CH', cname) - binning = channel_def['binning'] - bindata = channel_def['bindata'] +for cname, channel_def in source_data["channels"].iteritems(): + print("CH", cname) + binning = channel_def["binning"] + bindata = channel_def["bindata"] for hist, data in bindata.iteritems(): - print(f'{cname}_{hist}') - h = ROOT.TH1F(f'{cname}_{hist}', f'{cname}_{hist}', *binning) + print(f"{cname}_{hist}") + h = ROOT.TH1F(f"{cname}_{hist}", f"{cname}_{hist}", *binning) hists += [h] for i, v in enumerate(data): h.SetBinContent(i + 1, v) diff --git a/validation/multichan_coupledoverall_histfactory/makedata.py b/validation/multichan_coupledoverall_histfactory/makedata.py index 980b574ddf..41bb4b9a7b 100644 --- a/validation/multichan_coupledoverall_histfactory/makedata.py +++ b/validation/multichan_coupledoverall_histfactory/makedata.py @@ -7,32 +7,32 @@ source_data = json.load(source_file) root_file = sys.argv[2] -f = ROOT.TFile(root_file, 'RECREATE') +f = ROOT.TFile(root_file, "RECREATE") -for cname, channel_def in source_data['channels'].iteritems(): - print('CH', cname) - binning = channel_def['binning'] - bindata = channel_def['bindata'] +for cname, channel_def in source_data["channels"].iteritems(): + print("CH", cname) + binning = channel_def["binning"] + bindata = channel_def["bindata"] - data = ROOT.TH1F(f'{cname}_data', f'{cname}_data', *binning) - for i, v in enumerate(bindata['data']): + data = ROOT.TH1F(f"{cname}_data", f"{cname}_data", *binning) + for i, v in enumerate(bindata["data"]): data.SetBinContent(i + 1, v) data.Sumw2() - bkg1 = ROOT.TH1F(f'{cname}_bkg1', f'{cname}_bkg1', *binning) - for i, v in enumerate(bindata['bkg1']): + bkg1 = ROOT.TH1F(f"{cname}_bkg1", f"{cname}_bkg1", *binning) + for i, v in enumerate(bindata["bkg1"]): bkg1.SetBinContent(i + 1, v) bkg1.Sumw2() - if 'bkg2' in bindata: - bkg2 = ROOT.TH1F(f'{cname}_bkg2', f'{cname}_bkg2', *binning) - for i, v in enumerate(bindata['bkg2']): + if "bkg2" in bindata: + bkg2 = ROOT.TH1F(f"{cname}_bkg2", f"{cname}_bkg2", *binning) + for i, v in enumerate(bindata["bkg2"]): bkg2.SetBinContent(i + 1, v) bkg2.Sumw2() - if 'sig' in bindata: - sig = ROOT.TH1F(f'{cname}_signal', f'{cname}_signal', *binning) - for i, v in enumerate(bindata['sig']): + if "sig" in bindata: + sig = ROOT.TH1F(f"{cname}_signal", f"{cname}_signal", *binning) + for i, v in enumerate(bindata["sig"]): sig.SetBinContent(i + 1, v) sig.Sumw2() f.Write() diff --git a/validation/multichannel_histfactory/makedata.py b/validation/multichannel_histfactory/makedata.py index b24bfc7228..124b98f3f5 100644 --- a/validation/multichannel_histfactory/makedata.py +++ b/validation/multichannel_histfactory/makedata.py @@ -7,36 +7,36 @@ source_data = json.load(source_file) root_file = sys.argv[2] -f = ROOT.TFile(root_file, 'RECREATE') +f = ROOT.TFile(root_file, "RECREATE") -for cname, channel_def in source_data['channels'].iteritems(): - print('CH', cname) - binning = channel_def['binning'] - bindata = channel_def['bindata'] +for cname, channel_def in source_data["channels"].iteritems(): + print("CH", cname) + binning = channel_def["binning"] + bindata = channel_def["bindata"] - data = ROOT.TH1F(f'{cname}_data', f'{cname}_data', *binning) - for i, v in enumerate(bindata['data']): + data = ROOT.TH1F(f"{cname}_data", f"{cname}_data", *binning) + for i, v in enumerate(bindata["data"]): data.SetBinContent(i + 1, v) data.Sumw2() print(data.GetName()) - bkg = ROOT.TH1F(f'{cname}_bkg', f'{cname}_bkg', *binning) - for i, v in 
enumerate(bindata['bkg']): + bkg = ROOT.TH1F(f"{cname}_bkg", f"{cname}_bkg", *binning) + for i, v in enumerate(bindata["bkg"]): bkg.SetBinContent(i + 1, v) bkg.Sumw2() - if 'bkgerr' in bindata: - bkgerr = ROOT.TH1F(f'{cname}_bkgerr', f'{cname}_bkgerr', *binning) + if "bkgerr" in bindata: + bkgerr = ROOT.TH1F(f"{cname}_bkgerr", f"{cname}_bkgerr", *binning) # shapesys must be as multiplicative factor - for i, v in enumerate(bindata['bkgerr']): + for i, v in enumerate(bindata["bkgerr"]): bkgerr.SetBinContent(i + 1, v / bkg.GetBinContent(i + 1)) bkgerr.Sumw2() - if 'sig' in bindata: - sig = ROOT.TH1F(f'{cname}_signal', f'{cname}_signal', *binning) - for i, v in enumerate(bindata['sig']): + if "sig" in bindata: + sig = ROOT.TH1F(f"{cname}_signal", f"{cname}_signal", *binning) + for i, v in enumerate(bindata["sig"]): sig.SetBinContent(i + 1, v) sig.Sumw2() f.Write() diff --git a/validation/shared_nuispar_across_types/make_data.py b/validation/shared_nuispar_across_types/make_data.py index 889eede5a9..73f65f67c7 100644 --- a/validation/shared_nuispar_across_types/make_data.py +++ b/validation/shared_nuispar_across_types/make_data.py @@ -1,14 +1,14 @@ import ROOT -sig = 'sig', [3, 1] -nom = 'nom', [12, 13] +sig = "sig", [3, 1] +nom = "nom", [12, 13] -histo_up = 'hup', [14, 15] -histo_dn = 'hdn', [10, 11] +histo_up = "hup", [14, 15] +histo_dn = "hdn", [10, 11] -data = 'data', [15, 16] +data = "data", [15, 16] -f = ROOT.TFile.Open('data.root', 'recreate') +f = ROOT.TFile.Open("data.root", "recreate") for n, h in [sig, nom, histo_up, histo_dn, data]: diff --git a/validation/standard_hypo_test_demo.py b/validation/standard_hypo_test_demo.py index e95533a32c..2023bb9835 100644 --- a/validation/standard_hypo_test_demo.py +++ b/validation/standard_hypo_test_demo.py @@ -36,7 +36,7 @@ def standard_hypo_test_demo( profile_ll.SetOneSided(True) calc = ROOT.RooStats.FrequentistCalculator(data, bkg_model, sb_model) - print(f'by hand: {profile_ll.Evaluate(data, ROOT.RooArgSet(_var))}') + print(f"by hand: {profile_ll.Evaluate(data, ROOT.RooArgSet(_var))}") calc.SetToys(ntoys, ntoys) diff --git a/validation/xmlimport_input2/makedata.py b/validation/xmlimport_input2/makedata.py index fdabc01721..6f7209b035 100644 --- a/validation/xmlimport_input2/makedata.py +++ b/validation/xmlimport_input2/makedata.py @@ -7,18 +7,18 @@ source_data = json.load(source_file) root_file = sys.argv[2] -f = ROOT.TFile(root_file, 'RECREATE') +f = ROOT.TFile(root_file, "RECREATE") hists = [] -for cname, channel_def in source_data['channels'].iteritems(): - print('CH', cname) - binning = channel_def['binning'] - bindata = channel_def['bindata'] +for cname, channel_def in source_data["channels"].iteritems(): + print("CH", cname) + binning = channel_def["binning"] + bindata = channel_def["bindata"] for hist, data in bindata.iteritems(): - print(f'{cname}_{hist}') - h = ROOT.TH1F(f'{cname}_{hist}', f'{cname}_{hist}', *binning) + print(f"{cname}_{hist}") + h = ROOT.TH1F(f"{cname}_{hist}", f"{cname}_{hist}", *binning) hists += [h] for i, v in enumerate(data): h.SetBinContent(i + 1, v) From 586e5bf4f8881f40ea193e861de7dbf257eaafbe Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 23 Oct 2025 13:57:02 -0600 Subject: [PATCH 11/23] Place the ignore on the correct line --- src/pyhf/tensor/numpy_backend.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index 5b4ddafd8f..1d1da7c3c1 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ 
b/src/pyhf/tensor/numpy_backend.py
@@ -357,8 +357,8 @@ def percentile(
         # see https://github.com/numpy/numpy/issues/22125
         return cast(
             ArrayLike,
-            np.percentile(tensor_in, q, axis=axis, interpolation=interpolation),
-        )  # type: ignore[call-overload]
+            np.percentile(tensor_in, q, axis=axis, interpolation=interpolation),  # type: ignore[call-overload]
+        )
 
     def stack(self, sequence: Sequence[Tensor[T]], axis: int = 0) -> ArrayLike:
         return np.stack(sequence, axis=axis)

From 90cae1b44793f343e27abbd0d306820db35824cf Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 15:42:36 -0600
Subject: [PATCH 12/23] Disable isort to avoid circular import failure

---
 src/pyhf/__init__.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py
index 77364d5d3c..78d32fa882 100644
--- a/src/pyhf/__init__.py
+++ b/src/pyhf/__init__.py
@@ -1,11 +1,15 @@
-from pyhf import compat, infer, schema, simplemodels
+# FIXME: If import order is changed 'import pyhf' fails due to circular imports
+# ruff: isort: off
+from pyhf import compat, schema
 from pyhf._version import version as __version__
 from pyhf.optimize import OptimizerRetriever as optimize  # noqa
-from pyhf.patchset import PatchSet
-from pyhf.pdf import Model
 from pyhf.tensor import BackendRetriever as tensor
 from pyhf.tensor.manager import get_backend, set_backend
+from pyhf.patchset import PatchSet
+from pyhf.pdf import Model
+from pyhf import infer, simplemodels
 from pyhf.workspace import Workspace
+# ruff: isort: on
 
 __all__ = [
     "Model",

From 371b6e17b25df22e966f7ed48a97e75b2678663e Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 16:02:39 -0600
Subject: [PATCH 13/23] Reorder imports again to prevent other errors in testing

---
 src/pyhf/__init__.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py
index 78d32fa882..fa504f70db 100644
--- a/src/pyhf/__init__.py
+++ b/src/pyhf/__init__.py
@@ -1,14 +1,13 @@
 # FIXME: If import order is changed 'import pyhf' fails due to circular imports
 # ruff: isort: off
-from pyhf import compat, schema
 from pyhf._version import version as __version__
-from pyhf.optimize import OptimizerRetriever as optimize  # noqa
 from pyhf.tensor import BackendRetriever as tensor
+from pyhf.optimize import OptimizerRetriever as optimize  # noqa
 from pyhf.tensor.manager import get_backend, set_backend
-from pyhf.patchset import PatchSet
 from pyhf.pdf import Model
 from pyhf.workspace import Workspace
+from pyhf import schema, simplemodels, infer, compat
+from pyhf.patchset import PatchSet
 # ruff: isort: on
 
 __all__ = [

From 128eee67e998b8baf7eaa628676f7e396db6afe8 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 16:11:32 -0600
Subject: [PATCH 14/23] Apply fix for flake8-bugbear B904

https://docs.astral.sh/ruff/rules/raise-without-from-inside-except/
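
A minimal before/after sketch of what B904 flags (a generic snippet using
the names from the src/pyhf/infer/utils.py hunk below, not a literal
excerpt from the codebase):

    # Before: a bare re-raise inside `except` drops the triggering
    # KeyError from the exception context (flake8-bugbear B904)
    try:
        return _mapping[name]
    except KeyError:
        raise InvalidTestStatistic

    # After: explicit chaining records the KeyError as __cause__, so
    # the full traceback shows why the lookup failed
    try:
        return _mapping[name]
    except KeyError as exc:
        raise InvalidTestStatistic from exc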
---
 src/pyhf/infer/utils.py            | 4 ++--
 src/pyhf/interpolators/__init__.py | 4 ++--
 src/pyhf/optimize/__init__.py      | 6 +++---
 src/pyhf/optimize/mixins.py        | 4 ++--
 src/pyhf/patchset.py               | 4 ++--
 src/pyhf/schema/validator.py       | 2 +-
 src/pyhf/tensor/__init__.py        | 6 +++---
 src/pyhf/tensor/manager.py         | 8 ++++----
 src/pyhf/utils.py                  | 8 ++++----
 src/pyhf/workspace.py              | 4 ++--
 10 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/src/pyhf/infer/utils.py b/src/pyhf/infer/utils.py
index 96037b8e6a..0f9576d0d9 100644
--- a/src/pyhf/infer/utils.py
+++ b/src/pyhf/infer/utils.py
@@ -109,5 +109,5 @@ def get_test_stat(name):
     }
     try:
         return _mapping[name]
-    except KeyError:
-        raise InvalidTestStatistic
+    except KeyError as exc:
+        raise InvalidTestStatistic from exc
diff --git a/src/pyhf/interpolators/__init__.py b/src/pyhf/interpolators/__init__.py
index 8e65c6ef4e..2dfc9be714 100644
--- a/src/pyhf/interpolators/__init__.py
+++ b/src/pyhf/interpolators/__init__.py
@@ -38,8 +38,8 @@ def get(interpcode, do_tensorized_calc=True):
 
     try:
         return interpcodes[interpcode]
-    except KeyError:
-        raise exceptions.InvalidInterpCode
+    except KeyError as exc:
+        raise exceptions.InvalidInterpCode from exc
 
 
 __all__ = ["code0", "code1", "code2", "code4", "code4p"]
diff --git a/src/pyhf/optimize/__init__.py b/src/pyhf/optimize/__init__.py
index c055fac8ed..b037871430 100644
--- a/src/pyhf/optimize/__init__.py
+++ b/src/pyhf/optimize/__init__.py
@@ -26,11 +26,11 @@ def __getattr__(self, name):
                 # for autocomplete and dir() calls
                 self.minuit_optimizer = minuit_optimizer
                 return minuit_optimizer
-            except ImportError as e:
+            except ImportError as exc:
                 raise exceptions.ImportBackendError(
                     "There was a problem importing Minuit. The minuit optimizer cannot be used.",
-                    e,
-                )
+                    exc,
+                ) from exc
         elif name == "__wrapped__":  # doctest
             pass
diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py
index 5f2cfa681a..7696335e80 100644
--- a/src/pyhf/optimize/mixins.py
+++ b/src/pyhf/optimize/mixins.py
@@ -62,9 +62,9 @@ def _internal_minimize(
 
         try:
             assert result.success
-        except AssertionError:
+        except AssertionError as exc:
             log.error(result, exc_info=True)
-            raise exceptions.FailedMinimization(result)
+            raise exceptions.FailedMinimization(result) from exc
         return result
 
     def _internal_postprocess(self, fitresult, stitch_pars, return_uncertainties=False):
diff --git a/src/pyhf/patchset.py b/src/pyhf/patchset.py
index c53e80436c..07c2134b47 100644
--- a/src/pyhf/patchset.py
+++ b/src/pyhf/patchset.py
@@ -257,10 +257,10 @@ def __getitem__(self, key):
         key = tuple(key)
         try:
             return self._patches_by_key[key]
-        except KeyError:
+        except KeyError as exc:
             raise exceptions.InvalidPatchLookup(
                 f'No patch associated with "{key}" is defined in patchset.'
-            )
+            ) from exc
 
     def __iter__(self):
         """
diff --git a/src/pyhf/schema/validator.py b/src/pyhf/schema/validator.py
index f23eef3859..a69873cab9 100644
--- a/src/pyhf/schema/validator.py
+++ b/src/pyhf/schema/validator.py
@@ -97,4 +97,4 @@ def validate(
     try:
         return validator.validate(spec)
     except jsonschema.ValidationError as err:
-        raise pyhf.exceptions.InvalidSpecification(err, schema_name)
+        raise pyhf.exceptions.InvalidSpecification(err, schema_name) from err
diff --git a/src/pyhf/tensor/__init__.py b/src/pyhf/tensor/__init__.py
index 5e5d77d1d4..4a2e80ebee 100644
--- a/src/pyhf/tensor/__init__.py
+++ b/src/pyhf/tensor/__init__.py
@@ -33,11 +33,11 @@ def __getattr__(self, name):
                 self._array_types.add(jax_backend.array_type)
                 self._array_subtypes.add(jax_backend.array_subtype)
                 return jax_backend
-            except ImportError as e:
+            except ImportError as exc:
                 raise exceptions.ImportBackendError(
                     "There was a problem importing JAX. 
The jax backend cannot be used.", - e, - ) + exc, + ) from exc @property def array_types(self): diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py index 9c162e20b4..aa722439c1 100644 --- a/src/pyhf/tensor/manager.py +++ b/src/pyhf/tensor/manager.py @@ -105,10 +105,10 @@ def set_backend( new_backend: TensorBackend = getattr( BackendRetriever, f"{backend:s}_backend" )(**backend_kwargs) - except TypeError: + except TypeError as exc: raise exceptions.InvalidBackend( f"The backend provided is not supported: {backend:s}. Select from one of the supported backends: numpy, jax" - ) + ) from exc else: new_backend = backend @@ -139,10 +139,10 @@ def set_backend( new_optimizer = getattr( OptimizerRetriever, f"{custom_optimizer.lower()}_optimizer" )() - except TypeError: + except TypeError as exc: raise exceptions.InvalidOptimizer( f"The optimizer provided is not supported: {custom_optimizer}. Select from one of the supported optimizers: scipy, minuit" - ) + ) from exc else: new_optimizer = custom_optimizer diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index 8a10e411a2..fe07fc83fd 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -83,16 +83,16 @@ def digest(obj, algorithm="sha256"): try: stringified = json.dumps(obj, sort_keys=True, ensure_ascii=False).encode("utf8") - except TypeError: + except TypeError as exc: raise ValueError( "The supplied object is not JSON-serializable for calculating a hash." - ) + ) from exc try: hash_alg = getattr(hashlib, algorithm) - except AttributeError: + except AttributeError as exc: raise ValueError( f"{algorithm} is not an algorithm provided by Python's hashlib library." - ) + ) from exc return hash_alg(stringified).hexdigest() diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py index 8ce2252cd3..4c1ce8966b 100644 --- a/src/pyhf/workspace.py +++ b/src/pyhf/workspace.py @@ -387,10 +387,10 @@ def get_measurement(self, measurement_name=None, measurement_index=None): ) try: measurement = self["measurements"][measurement_index] - except IndexError: + except IndexError as exc: raise exceptions.InvalidMeasurement( f"The measurement index {measurement_index} is out of bounds as only {len(self.measurement_names)} measurement(s) have been defined." 
- ) + ) from exc else: raise exceptions.InvalidMeasurement("No measurements have been defined.") From da6f6917fbc2eae1a84ccb42af8ec054a7d2de83 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 23 Oct 2025 17:08:47 -0600 Subject: [PATCH 15/23] fix: Apply flake8-bugbear B007 for docs B007 Loop control variable not used within loop body https://docs.astral.sh/ruff/rules/unused-loop-control-variable/ --- .../binderexample/StatisticalAnalysis.ipynb | 2854 +---------------- .../learn/TensorizingInterpolations.ipynb | 10 +- 2 files changed, 10 insertions(+), 2854 deletions(-) diff --git a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb index 6915f5378a..6aa358bb28 100644 --- a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb +++ b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb @@ -170,7 +170,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -197,7 +197,7 @@ " x = np.arange(nbins)\n", " data = np.zeros(nbins)\n", " items = []\n", - " for i in [3, 2, 1, 0]:\n", + " for _ in [3, 2, 1, 0]:\n", " items.append(ax.bar(x, data, 1, alpha=1.0))\n", " animate_plot_pieces = (\n", " items,\n", @@ -263,955 +263,7 @@ "outputs": [ { "data": { - "application/javascript": [ - "/* Put everything inside the global mpl namespace */\n", - "/* global mpl */\n", - "window.mpl = {};\n", - "\n", - "mpl.get_websocket_type = function () {\n", - " if (typeof WebSocket !== 'undefined') {\n", - " return WebSocket;\n", - " } else if (typeof MozWebSocket !== 'undefined') {\n", - " return MozWebSocket;\n", - " } else {\n", - " alert(\n", - " 'Your browser does not have WebSocket support. ' +\n", - " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n", - " 'Firefox 4 and 5 are also supported but you ' +\n", - " 'have to enable WebSockets in about:config.'\n", - " );\n", - " }\n", - "};\n", - "\n", - "mpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n", - " this.id = figure_id;\n", - "\n", - " this.ws = websocket;\n", - "\n", - " this.supports_binary = this.ws.binaryType !== undefined;\n", - "\n", - " if (!this.supports_binary) {\n", - " var warnings = document.getElementById('mpl-warnings');\n", - " if (warnings) {\n", - " warnings.style.display = 'block';\n", - " warnings.textContent =\n", - " 'This browser does not support binary websocket messages. 
' +\n", - " 'Performance may be slow.';\n", - " }\n", - " }\n", - "\n", - " this.imageObj = new Image();\n", - "\n", - " this.context = undefined;\n", - " this.message = undefined;\n", - " this.canvas = undefined;\n", - " this.rubberband_canvas = undefined;\n", - " this.rubberband_context = undefined;\n", - " this.format_dropdown = undefined;\n", - "\n", - " this.image_mode = 'full';\n", - "\n", - " this.root = document.createElement('div');\n", - " this.root.setAttribute('style', 'display: inline-block');\n", - " this._root_extra_style(this.root);\n", - "\n", - " parent_element.appendChild(this.root);\n", - "\n", - " this._init_header(this);\n", - " this._init_canvas(this);\n", - " this._init_toolbar(this);\n", - "\n", - " var fig = this;\n", - "\n", - " this.waiting = false;\n", - "\n", - " this.ws.onopen = function () {\n", - " fig.send_message('supports_binary', { value: fig.supports_binary });\n", - " fig.send_message('send_image_mode', {});\n", - " if (fig.ratio !== 1) {\n", - " fig.send_message('set_dpi_ratio', { dpi_ratio: fig.ratio });\n", - " }\n", - " fig.send_message('refresh', {});\n", - " };\n", - "\n", - " this.imageObj.onload = function () {\n", - " if (fig.image_mode === 'full') {\n", - " // Full images could contain transparency (where diff images\n", - " // almost always do), so we need to clear the canvas so that\n", - " // there is no ghosting.\n", - " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", - " }\n", - " fig.context.drawImage(fig.imageObj, 0, 0);\n", - " };\n", - "\n", - " this.imageObj.onunload = function () {\n", - " fig.ws.close();\n", - " };\n", - "\n", - " this.ws.onmessage = this._make_on_message_function(this);\n", - "\n", - " this.ondownload = ondownload;\n", - "};\n", - "\n", - "mpl.figure.prototype._init_header = function () {\n", - " var titlebar = document.createElement('div');\n", - " titlebar.classList =\n", - " 'ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix';\n", - " var titletext = document.createElement('div');\n", - " titletext.classList = 'ui-dialog-title';\n", - " titletext.setAttribute(\n", - " 'style',\n", - " 'width: 100%; text-align: center; padding: 3px;'\n", - " );\n", - " titlebar.appendChild(titletext);\n", - " this.root.appendChild(titlebar);\n", - " this.header = titletext;\n", - "};\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function (_canvas_div) {};\n", - "\n", - "mpl.figure.prototype._root_extra_style = function (_canvas_div) {};\n", - "\n", - "mpl.figure.prototype._init_canvas = function () {\n", - " var fig = this;\n", - "\n", - " var canvas_div = (this.canvas_div = document.createElement('div'));\n", - " canvas_div.setAttribute(\n", - " 'style',\n", - " 'border: 1px solid #ddd;' +\n", - " 'box-sizing: content-box;' +\n", - " 'clear: both;' +\n", - " 'min-height: 1px;' +\n", - " 'min-width: 1px;' +\n", - " 'outline: 0;' +\n", - " 'overflow: hidden;' +\n", - " 'position: relative;' +\n", - " 'resize: both;'\n", - " );\n", - "\n", - " function on_keyboard_event_closure(name) {\n", - " return function (event) {\n", - " return fig.key_event(event, name);\n", - " };\n", - " }\n", - "\n", - " canvas_div.addEventListener(\n", - " 'keydown',\n", - " on_keyboard_event_closure('key_press')\n", - " );\n", - " canvas_div.addEventListener(\n", - " 'keyup',\n", - " on_keyboard_event_closure('key_release')\n", - " );\n", - "\n", - " this._canvas_extra_style(canvas_div);\n", - " this.root.appendChild(canvas_div);\n", - "\n", - " var canvas = (this.canvas = 
document.createElement('canvas'));\n", - " canvas.classList.add('mpl-canvas');\n", - " canvas.setAttribute('style', 'box-sizing: content-box;');\n", - "\n", - " this.context = canvas.getContext('2d');\n", - "\n", - " var backingStore =\n", - " this.context.backingStorePixelRatio ||\n", - " this.context.webkitBackingStorePixelRatio ||\n", - " this.context.mozBackingStorePixelRatio ||\n", - " this.context.msBackingStorePixelRatio ||\n", - " this.context.oBackingStorePixelRatio ||\n", - " this.context.backingStorePixelRatio ||\n", - " 1;\n", - "\n", - " this.ratio = (window.devicePixelRatio || 1) / backingStore;\n", - "\n", - " var rubberband_canvas = (this.rubberband_canvas = document.createElement(\n", - " 'canvas'\n", - " ));\n", - " rubberband_canvas.setAttribute(\n", - " 'style',\n", - " 'box-sizing: content-box; position: absolute; left: 0; top: 0; z-index: 1;'\n", - " );\n", - "\n", - " // Apply a ponyfill if ResizeObserver is not implemented by browser.\n", - " if (this.ResizeObserver === undefined) {\n", - " if (window.ResizeObserver !== undefined) {\n", - " this.ResizeObserver = window.ResizeObserver;\n", - " } else {\n", - " var obs = _JSXTOOLS_RESIZE_OBSERVER({});\n", - " this.ResizeObserver = obs.ResizeObserver;\n", - " }\n", - " }\n", - "\n", - " this.resizeObserverInstance = new this.ResizeObserver(function (entries) {\n", - " var nentries = entries.length;\n", - " for (var i = 0; i < nentries; i++) {\n", - " var entry = entries[i];\n", - " var width, height;\n", - " if (entry.contentBoxSize) {\n", - " if (entry.contentBoxSize instanceof Array) {\n", - " // Chrome 84 implements new version of spec.\n", - " width = entry.contentBoxSize[0].inlineSize;\n", - " height = entry.contentBoxSize[0].blockSize;\n", - " } else {\n", - " // Firefox implements old version of spec.\n", - " width = entry.contentBoxSize.inlineSize;\n", - " height = entry.contentBoxSize.blockSize;\n", - " }\n", - " } else {\n", - " // Chrome <84 implements even older version of spec.\n", - " width = entry.contentRect.width;\n", - " height = entry.contentRect.height;\n", - " }\n", - "\n", - " // Keep the size of the canvas and rubber band canvas in sync with\n", - " // the canvas container.\n", - " if (entry.devicePixelContentBoxSize) {\n", - " // Chrome 84 implements new version of spec.\n", - " canvas.setAttribute(\n", - " 'width',\n", - " entry.devicePixelContentBoxSize[0].inlineSize\n", - " );\n", - " canvas.setAttribute(\n", - " 'height',\n", - " entry.devicePixelContentBoxSize[0].blockSize\n", - " );\n", - " } else {\n", - " canvas.setAttribute('width', width * fig.ratio);\n", - " canvas.setAttribute('height', height * fig.ratio);\n", - " }\n", - " canvas.setAttribute(\n", - " 'style',\n", - " 'width: ' + width + 'px; height: ' + height + 'px;'\n", - " );\n", - "\n", - " rubberband_canvas.setAttribute('width', width);\n", - " rubberband_canvas.setAttribute('height', height);\n", - "\n", - " // And update the size in Python. 
We ignore the initial 0/0 size\n", - " // that occurs as the element is placed into the DOM, which should\n", - " // otherwise not happen due to the minimum size styling.\n", - " if (fig.ws.readyState == 1 && width != 0 && height != 0) {\n", - " fig.request_resize(width, height);\n", - " }\n", - " }\n", - " });\n", - " this.resizeObserverInstance.observe(canvas_div);\n", - "\n", - " function on_mouse_event_closure(name) {\n", - " return function (event) {\n", - " return fig.mouse_event(event, name);\n", - " };\n", - " }\n", - "\n", - " rubberband_canvas.addEventListener(\n", - " 'mousedown',\n", - " on_mouse_event_closure('button_press')\n", - " );\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseup',\n", - " on_mouse_event_closure('button_release')\n", - " );\n", - " // Throttle sequential mouse events to 1 every 20ms.\n", - " rubberband_canvas.addEventListener(\n", - " 'mousemove',\n", - " on_mouse_event_closure('motion_notify')\n", - " );\n", - "\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseenter',\n", - " on_mouse_event_closure('figure_enter')\n", - " );\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseleave',\n", - " on_mouse_event_closure('figure_leave')\n", - " );\n", - "\n", - " canvas_div.addEventListener('wheel', function (event) {\n", - " if (event.deltaY < 0) {\n", - " event.step = 1;\n", - " } else {\n", - " event.step = -1;\n", - " }\n", - " on_mouse_event_closure('scroll')(event);\n", - " });\n", - "\n", - " canvas_div.appendChild(canvas);\n", - " canvas_div.appendChild(rubberband_canvas);\n", - "\n", - " this.rubberband_context = rubberband_canvas.getContext('2d');\n", - " this.rubberband_context.strokeStyle = '#000000';\n", - "\n", - " this._resize_canvas = function (width, height, forward) {\n", - " if (forward) {\n", - " canvas_div.style.width = width + 'px';\n", - " canvas_div.style.height = height + 'px';\n", - " }\n", - " };\n", - "\n", - " // Disable right mouse context menu.\n", - " this.rubberband_canvas.addEventListener('contextmenu', function (_e) {\n", - " event.preventDefault();\n", - " return false;\n", - " });\n", - "\n", - " function set_focus() {\n", - " canvas.focus();\n", - " canvas_div.focus();\n", - " }\n", - "\n", - " window.setTimeout(set_focus, 100);\n", - "};\n", - "\n", - "mpl.figure.prototype._init_toolbar = function () {\n", - " var fig = this;\n", - "\n", - " var toolbar = document.createElement('div');\n", - " toolbar.classList = 'mpl-toolbar';\n", - " this.root.appendChild(toolbar);\n", - "\n", - " function on_click_closure(name) {\n", - " return function (_event) {\n", - " return fig.toolbar_button_onclick(name);\n", - " };\n", - " }\n", - "\n", - " function on_mouseover_closure(tooltip) {\n", - " return function (event) {\n", - " if (!event.currentTarget.disabled) {\n", - " return fig.toolbar_button_onmouseover(tooltip);\n", - " }\n", - " };\n", - " }\n", - "\n", - " fig.buttons = {};\n", - " var buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'mpl-button-group';\n", - " for (var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " /* Instead of a spacer, we start a new button group. 
*/\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - " buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'mpl-button-group';\n", - " continue;\n", - " }\n", - "\n", - " var button = (fig.buttons[name] = document.createElement('button'));\n", - " button.classList = 'mpl-widget';\n", - " button.setAttribute('role', 'button');\n", - " button.setAttribute('aria-disabled', 'false');\n", - " button.addEventListener('click', on_click_closure(method_name));\n", - " button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n", - "\n", - " var icon_img = document.createElement('img');\n", - " icon_img.src = '_images/' + image + '.png';\n", - " icon_img.srcset = '_images/' + image + '_large.png 2x';\n", - " icon_img.alt = tooltip;\n", - " button.appendChild(icon_img);\n", - "\n", - " buttonGroup.appendChild(button);\n", - " }\n", - "\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - "\n", - " var fmt_picker = document.createElement('select');\n", - " fmt_picker.classList = 'mpl-widget';\n", - " toolbar.appendChild(fmt_picker);\n", - " this.format_dropdown = fmt_picker;\n", - "\n", - " for (var ind in mpl.extensions) {\n", - " var fmt = mpl.extensions[ind];\n", - " var option = document.createElement('option');\n", - " option.selected = fmt === mpl.default_extension;\n", - " option.innerHTML = fmt;\n", - " fmt_picker.appendChild(option);\n", - " }\n", - "\n", - " var status_bar = document.createElement('span');\n", - " status_bar.classList = 'mpl-message';\n", - " toolbar.appendChild(status_bar);\n", - " this.message = status_bar;\n", - "};\n", - "\n", - "mpl.figure.prototype.request_resize = function (x_pixels, y_pixels) {\n", - " // Request matplotlib to resize the figure. 
Matplotlib will then trigger a resize in the client,\n", - " // which will in turn request a refresh of the image.\n", - " this.send_message('resize', { width: x_pixels, height: y_pixels });\n", - "};\n", - "\n", - "mpl.figure.prototype.send_message = function (type, properties) {\n", - " properties['type'] = type;\n", - " properties['figure_id'] = this.id;\n", - " this.ws.send(JSON.stringify(properties));\n", - "};\n", - "\n", - "mpl.figure.prototype.send_draw_message = function () {\n", - " if (!this.waiting) {\n", - " this.waiting = true;\n", - " this.ws.send(JSON.stringify({ type: 'draw', figure_id: this.id }));\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_save = function (fig, _msg) {\n", - " var format_dropdown = fig.format_dropdown;\n", - " var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n", - " fig.ondownload(fig, format);\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_resize = function (fig, msg) {\n", - " var size = msg['size'];\n", - " if (size[0] !== fig.canvas.width || size[1] !== fig.canvas.height) {\n", - " fig._resize_canvas(size[0], size[1], msg['forward']);\n", - " fig.send_message('refresh', {});\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_rubberband = function (fig, msg) {\n", - " var x0 = msg['x0'] / fig.ratio;\n", - " var y0 = (fig.canvas.height - msg['y0']) / fig.ratio;\n", - " var x1 = msg['x1'] / fig.ratio;\n", - " var y1 = (fig.canvas.height - msg['y1']) / fig.ratio;\n", - " x0 = Math.floor(x0) + 0.5;\n", - " y0 = Math.floor(y0) + 0.5;\n", - " x1 = Math.floor(x1) + 0.5;\n", - " y1 = Math.floor(y1) + 0.5;\n", - " var min_x = Math.min(x0, x1);\n", - " var min_y = Math.min(y0, y1);\n", - " var width = Math.abs(x1 - x0);\n", - " var height = Math.abs(y1 - y0);\n", - "\n", - " fig.rubberband_context.clearRect(\n", - " 0,\n", - " 0,\n", - " fig.canvas.width / fig.ratio,\n", - " fig.canvas.height / fig.ratio\n", - " );\n", - "\n", - " fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_figure_label = function (fig, msg) {\n", - " // Updates the figure title.\n", - " fig.header.textContent = msg['label'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_cursor = function (fig, msg) {\n", - " var cursor = msg['cursor'];\n", - " switch (cursor) {\n", - " case 0:\n", - " cursor = 'pointer';\n", - " break;\n", - " case 1:\n", - " cursor = 'default';\n", - " break;\n", - " case 2:\n", - " cursor = 'crosshair';\n", - " break;\n", - " case 3:\n", - " cursor = 'move';\n", - " break;\n", - " }\n", - " fig.rubberband_canvas.style.cursor = cursor;\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_message = function (fig, msg) {\n", - " fig.message.textContent = msg['message'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_draw = function (fig, _msg) {\n", - " // Request the server to send over a new figure.\n", - " fig.send_draw_message();\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_image_mode = function (fig, msg) {\n", - " fig.image_mode = msg['mode'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_history_buttons = function (fig, msg) {\n", - " for (var key in msg) {\n", - " if (!(key in fig.buttons)) {\n", - " continue;\n", - " }\n", - " fig.buttons[key].disabled = !msg[key];\n", - " fig.buttons[key].setAttribute('aria-disabled', !msg[key]);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_navigate_mode = function (fig, msg) {\n", - " if (msg['mode'] === 'PAN') {\n", - " 
fig.buttons['Pan'].classList.add('active');\n", - " fig.buttons['Zoom'].classList.remove('active');\n", - " } else if (msg['mode'] === 'ZOOM') {\n", - " fig.buttons['Pan'].classList.remove('active');\n", - " fig.buttons['Zoom'].classList.add('active');\n", - " } else {\n", - " fig.buttons['Pan'].classList.remove('active');\n", - " fig.buttons['Zoom'].classList.remove('active');\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.updated_canvas_event = function () {\n", - " // Called whenever the canvas gets updated.\n", - " this.send_message('ack', {});\n", - "};\n", - "\n", - "// A function to construct a web socket function for onmessage handling.\n", - "// Called in the figure constructor.\n", - "mpl.figure.prototype._make_on_message_function = function (fig) {\n", - " return function socket_on_message(evt) {\n", - " if (evt.data instanceof Blob) {\n", - " /* FIXME: We get \"Resource interpreted as Image but\n", - " * transferred with MIME type text/plain:\" errors on\n", - " * Chrome. But how to set the MIME type? It doesn't seem\n", - " * to be part of the websocket stream */\n", - " evt.data.type = 'image/png';\n", - "\n", - " /* Free the memory for the previous frames */\n", - " if (fig.imageObj.src) {\n", - " (window.URL || window.webkitURL).revokeObjectURL(\n", - " fig.imageObj.src\n", - " );\n", - " }\n", - "\n", - " fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n", - " evt.data\n", - " );\n", - " fig.updated_canvas_event();\n", - " fig.waiting = false;\n", - " return;\n", - " } else if (\n", - " typeof evt.data === 'string' &&\n", - " evt.data.slice(0, 21) === 'data:image/png;base64'\n", - " ) {\n", - " fig.imageObj.src = evt.data;\n", - " fig.updated_canvas_event();\n", - " fig.waiting = false;\n", - " return;\n", - " }\n", - "\n", - " var msg = JSON.parse(evt.data);\n", - " var msg_type = msg['type'];\n", - "\n", - " // Call the \"handle_{type}\" callback, which takes\n", - " // the figure and JSON message as its only arguments.\n", - " try {\n", - " var callback = fig['handle_' + msg_type];\n", - " } catch (e) {\n", - " console.log(\n", - " \"No handler for the '\" + msg_type + \"' message type: \",\n", - " msg\n", - " );\n", - " return;\n", - " }\n", - "\n", - " if (callback) {\n", - " try {\n", - " // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n", - " callback(fig, msg);\n", - " } catch (e) {\n", - " console.log(\n", - " \"Exception inside the 'handler_\" + msg_type + \"' callback:\",\n", - " e,\n", - " e.stack,\n", - " msg\n", - " );\n", - " }\n", - " }\n", - " };\n", - "};\n", - "\n", - "// from http://stackoverflow.com/questions/1114465/getting-mouse-location-in-canvas\n", - "mpl.findpos = function (e) {\n", - " //this section is from http://www.quirksmode.org/js/events_properties.html\n", - " var targ;\n", - " if (!e) {\n", - " e = window.event;\n", - " }\n", - " if (e.target) {\n", - " targ = e.target;\n", - " } else if (e.srcElement) {\n", - " targ = e.srcElement;\n", - " }\n", - " if (targ.nodeType === 3) {\n", - " // defeat Safari bug\n", - " targ = targ.parentNode;\n", - " }\n", - "\n", - " // pageX,Y are the mouse positions relative to the document\n", - " var boundingRect = targ.getBoundingClientRect();\n", - " var x = e.pageX - (boundingRect.left + document.body.scrollLeft);\n", - " var y = e.pageY - (boundingRect.top + document.body.scrollTop);\n", - "\n", - " return { x: x, y: y };\n", - "};\n", - "\n", - "/*\n", - " * return a copy of an object with only non-object keys\n", - " * we need this to avoid circular 
references\n", - " * http://stackoverflow.com/a/24161582/3208463\n", - " */\n", - "function simpleKeys(original) {\n", - " return Object.keys(original).reduce(function (obj, key) {\n", - " if (typeof original[key] !== 'object') {\n", - " obj[key] = original[key];\n", - " }\n", - " return obj;\n", - " }, {});\n", - "}\n", - "\n", - "mpl.figure.prototype.mouse_event = function (event, name) {\n", - " var canvas_pos = mpl.findpos(event);\n", - "\n", - " if (name === 'button_press') {\n", - " this.canvas.focus();\n", - " this.canvas_div.focus();\n", - " }\n", - "\n", - " var x = canvas_pos.x * this.ratio;\n", - " var y = canvas_pos.y * this.ratio;\n", - "\n", - " this.send_message(name, {\n", - " x: x,\n", - " y: y,\n", - " button: event.button,\n", - " step: event.step,\n", - " guiEvent: simpleKeys(event),\n", - " });\n", - "\n", - " /* This prevents the web browser from automatically changing to\n", - " * the text insertion cursor when the button is pressed. We want\n", - " * to control all of the cursor setting manually through the\n", - " * 'cursor' event from matplotlib */\n", - " event.preventDefault();\n", - " return false;\n", - "};\n", - "\n", - "mpl.figure.prototype._key_event_extra = function (_event, _name) {\n", - " // Handle any extra behaviour associated with a key event\n", - "};\n", - "\n", - "mpl.figure.prototype.key_event = function (event, name) {\n", - " // Prevent repeat events\n", - " if (name === 'key_press') {\n", - " if (event.which === this._key) {\n", - " return;\n", - " } else {\n", - " this._key = event.which;\n", - " }\n", - " }\n", - " if (name === 'key_release') {\n", - " this._key = null;\n", - " }\n", - "\n", - " var value = '';\n", - " if (event.ctrlKey && event.which !== 17) {\n", - " value += 'ctrl+';\n", - " }\n", - " if (event.altKey && event.which !== 18) {\n", - " value += 'alt+';\n", - " }\n", - " if (event.shiftKey && event.which !== 16) {\n", - " value += 'shift+';\n", - " }\n", - "\n", - " value += 'k';\n", - " value += event.which.toString();\n", - "\n", - " this._key_event_extra(event, name);\n", - "\n", - " this.send_message(name, { key: value, guiEvent: simpleKeys(event) });\n", - " return false;\n", - "};\n", - "\n", - "mpl.figure.prototype.toolbar_button_onclick = function (name) {\n", - " if (name === 'download') {\n", - " this.handle_save(this, null);\n", - " } else {\n", - " this.send_message('toolbar_button', { name: name });\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.toolbar_button_onmouseover = function (tooltip) {\n", - " this.message.textContent = tooltip;\n", - "};\n", - "\n", - "///////////////// REMAINING CONTENT GENERATED BY embed_js.py /////////////////\n", - "// prettier-ignore\n", - "var _JSXTOOLS_RESIZE_OBSERVER=function(A){var t,i=new WeakMap,n=new WeakMap,a=new WeakMap,r=new WeakMap,o=new Set;function s(e){if(!(this instanceof s))throw new TypeError(\"Constructor requires 'new' operator\");i.set(this,e)}function h(){throw new TypeError(\"Function is not a constructor\")}function c(e,t,i,n){e=0 in arguments?Number(arguments[0]):0,t=1 in arguments?Number(arguments[1]):0,i=2 in arguments?Number(arguments[2]):0,n=3 in arguments?Number(arguments[3]):0,this.right=(this.x=this.left=e)+(this.width=i),this.bottom=(this.y=this.top=t)+(this.height=n),Object.freeze(this)}function d(){t=requestAnimationFrame(d);var s=new WeakMap,p=new Set;o.forEach((function(t){r.get(t).forEach((function(i){var r=t instanceof 
window.SVGElement,o=a.get(t),d=r?0:parseFloat(o.paddingTop),f=r?0:parseFloat(o.paddingRight),l=r?0:parseFloat(o.paddingBottom),u=r?0:parseFloat(o.paddingLeft),g=r?0:parseFloat(o.borderTopWidth),m=r?0:parseFloat(o.borderRightWidth),w=r?0:parseFloat(o.borderBottomWidth),b=u+f,F=d+l,v=(r?0:parseFloat(o.borderLeftWidth))+m,W=g+w,y=r?0:t.offsetHeight-W-t.clientHeight,E=r?0:t.offsetWidth-v-t.clientWidth,R=b+v,z=F+W,M=r?t.width:parseFloat(o.width)-R-E,O=r?t.height:parseFloat(o.height)-z-y;if(n.has(t)){var k=n.get(t);if(k[0]===M&&k[1]===O)return}n.set(t,[M,O]);var S=Object.create(h.prototype);S.target=t,S.contentRect=new c(u,d,M,O),s.has(i)||(s.set(i,[]),p.add(i)),s.get(i).push(S)}))})),p.forEach((function(e){i.get(e).call(e,s.get(e),e)}))}return s.prototype.observe=function(i){if(i instanceof window.Element){r.has(i)||(r.set(i,new Set),o.add(i),a.set(i,window.getComputedStyle(i)));var n=r.get(i);n.has(this)||n.add(this),cancelAnimationFrame(t),t=requestAnimationFrame(d)}},s.prototype.unobserve=function(i){if(i instanceof window.Element&&r.has(i)){var n=r.get(i);n.has(this)&&(n.delete(this),n.size||(r.delete(i),o.delete(i))),n.size||r.delete(i),o.size||cancelAnimationFrame(t)}},A.DOMRectReadOnly=c,A.ResizeObserver=s,A.ResizeObserverEntry=h,A}; // eslint-disable-line\n", - "mpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home icon-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left icon-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right icon-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Left button pans, Right button zooms\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-arrows icon-move\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-square-o icon-check-empty\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o icon-save\", \"download\"]];\n", - "\n", - "mpl.extensions = [\"eps\", \"jpeg\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\"];\n", - "\n", - "mpl.default_extension = \"png\";/* global mpl */\n", - "\n", - "var comm_websocket_adapter = function (comm) {\n", - " // Create a \"websocket\"-like object which calls the given IPython comm\n", - " // object with the appropriate methods. 
Currently this is a non binary\n", - " // socket, so there is still some room for performance tuning.\n", - " var ws = {};\n", - "\n", - " ws.close = function () {\n", - " comm.close();\n", - " };\n", - " ws.send = function (m) {\n", - " //console.log('sending', m);\n", - " comm.send(m);\n", - " };\n", - " // Register the callback with on_msg.\n", - " comm.on_msg(function (msg) {\n", - " //console.log('receiving', msg['content']['data'], msg);\n", - " // Pass the mpl event to the overridden (by mpl) onmessage function.\n", - " ws.onmessage(msg['content']['data']);\n", - " });\n", - " return ws;\n", - "};\n", - "\n", - "mpl.mpl_figure_comm = function (comm, msg) {\n", - " // This is the function which gets called when the mpl process\n", - " // starts-up an IPython Comm through the \"matplotlib\" channel.\n", - "\n", - " var id = msg.content.data.id;\n", - " // Get hold of the div created by the display call when the Comm\n", - " // socket was opened in Python.\n", - " var element = document.getElementById(id);\n", - " var ws_proxy = comm_websocket_adapter(comm);\n", - "\n", - " function ondownload(figure, _format) {\n", - " window.open(figure.canvas.toDataURL());\n", - " }\n", - "\n", - " var fig = new mpl.figure(id, ws_proxy, ondownload, element);\n", - "\n", - " // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n", - " // web socket which is closed, not our websocket->open comm proxy.\n", - " ws_proxy.onopen();\n", - "\n", - " fig.parent_element = element;\n", - " fig.cell_info = mpl.find_output_cell(\"
\");\n", - " if (!fig.cell_info) {\n", - " console.error('Failed to find cell for figure', id, fig);\n", - " return;\n", - " }\n", - " fig.cell_info[0].output_area.element.on(\n", - " 'cleared',\n", - " { fig: fig },\n", - " fig._remove_fig_handler\n", - " );\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_close = function (fig, msg) {\n", - " var width = fig.canvas.width / fig.ratio;\n", - " fig.cell_info[0].output_area.element.off(\n", - " 'cleared',\n", - " fig._remove_fig_handler\n", - " );\n", - " fig.resizeObserverInstance.unobserve(fig.canvas_div);\n", - "\n", - " // Update the output cell to use the data from the current canvas.\n", - " fig.push_to_output();\n", - " var dataURL = fig.canvas.toDataURL();\n", - " // Re-enable the keyboard manager in IPython - without this line, in FF,\n", - " // the notebook keyboard shortcuts fail.\n", - " IPython.keyboard_manager.enable();\n", - " fig.parent_element.innerHTML =\n", - " '';\n", - " fig.close_ws(fig, msg);\n", - "};\n", - "\n", - "mpl.figure.prototype.close_ws = function (fig, msg) {\n", - " fig.send_message('closing', msg);\n", - " // fig.ws.close()\n", - "};\n", - "\n", - "mpl.figure.prototype.push_to_output = function (_remove_interactive) {\n", - " // Turn the data on the canvas into data in the output cell.\n", - " var width = this.canvas.width / this.ratio;\n", - " var dataURL = this.canvas.toDataURL();\n", - " this.cell_info[1]['text/html'] =\n", - " '';\n", - "};\n", - "\n", - "mpl.figure.prototype.updated_canvas_event = function () {\n", - " // Tell IPython that the notebook contents must change.\n", - " IPython.notebook.set_dirty(true);\n", - " this.send_message('ack', {});\n", - " var fig = this;\n", - " // Wait a second, then push the new image to the DOM so\n", - " // that it is saved nicely (might be nice to debounce this).\n", - " setTimeout(function () {\n", - " fig.push_to_output();\n", - " }, 1000);\n", - "};\n", - "\n", - "mpl.figure.prototype._init_toolbar = function () {\n", - " var fig = this;\n", - "\n", - " var toolbar = document.createElement('div');\n", - " toolbar.classList = 'btn-toolbar';\n", - " this.root.appendChild(toolbar);\n", - "\n", - " function on_click_closure(name) {\n", - " return function (_event) {\n", - " return fig.toolbar_button_onclick(name);\n", - " };\n", - " }\n", - "\n", - " function on_mouseover_closure(tooltip) {\n", - " return function (event) {\n", - " if (!event.currentTarget.disabled) {\n", - " return fig.toolbar_button_onmouseover(tooltip);\n", - " }\n", - " };\n", - " }\n", - "\n", - " fig.buttons = {};\n", - " var buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'btn-group';\n", - " var button;\n", - " for (var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " /* Instead of a spacer, we start a new button group. 
*/\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - " buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'btn-group';\n", - " continue;\n", - " }\n", - "\n", - " button = fig.buttons[name] = document.createElement('button');\n", - " button.classList = 'btn btn-default';\n", - " button.href = '#';\n", - " button.title = name;\n", - " button.innerHTML = '';\n", - " button.addEventListener('click', on_click_closure(method_name));\n", - " button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n", - " buttonGroup.appendChild(button);\n", - " }\n", - "\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - "\n", - " // Add the status bar.\n", - " var status_bar = document.createElement('span');\n", - " status_bar.classList = 'mpl-message pull-right';\n", - " toolbar.appendChild(status_bar);\n", - " this.message = status_bar;\n", - "\n", - " // Add the close button to the window.\n", - " var buttongrp = document.createElement('div');\n", - " buttongrp.classList = 'btn-group inline pull-right';\n", - " button = document.createElement('button');\n", - " button.classList = 'btn btn-mini btn-primary';\n", - " button.href = '#';\n", - " button.title = 'Stop Interaction';\n", - " button.innerHTML = '';\n", - " button.addEventListener('click', function (_evt) {\n", - " fig.handle_close(fig, {});\n", - " });\n", - " button.addEventListener(\n", - " 'mouseover',\n", - " on_mouseover_closure('Stop Interaction')\n", - " );\n", - " buttongrp.appendChild(button);\n", - " var titlebar = this.root.querySelector('.ui-dialog-titlebar');\n", - " titlebar.insertBefore(buttongrp, titlebar.firstChild);\n", - "};\n", - "\n", - "mpl.figure.prototype._remove_fig_handler = function (event) {\n", - " var fig = event.data.fig;\n", - " if (event.target !== this) {\n", - " // Ignore bubbled events from children.\n", - " return;\n", - " }\n", - " fig.close_ws(fig, {});\n", - "};\n", - "\n", - "mpl.figure.prototype._root_extra_style = function (el) {\n", - " el.style.boxSizing = 'content-box'; // override notebook setting of border-box.\n", - "};\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function (el) {\n", - " // this is important to make the div 'focusable\n", - " el.setAttribute('tabindex', 0);\n", - " // reach out to IPython and tell the keyboard manager to turn it's self\n", - " // off when our div gets focus\n", - "\n", - " // location in version 3\n", - " if (IPython.notebook.keyboard_manager) {\n", - " IPython.notebook.keyboard_manager.register_events(el);\n", - " } else {\n", - " // location in version 2\n", - " IPython.keyboard_manager.register_events(el);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype._key_event_extra = function (event, _name) {\n", - " var manager = IPython.notebook.keyboard_manager;\n", - " if (!manager) {\n", - " manager = IPython.keyboard_manager;\n", - " }\n", - "\n", - " // Check for shift+enter\n", - " if (event.shiftKey && event.which === 13) {\n", - " this.canvas_div.blur();\n", - " // select the cell after this one\n", - " var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n", - " IPython.notebook.select(index + 1);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_save = function (fig, _msg) {\n", - " fig.ondownload(fig, null);\n", - "};\n", - "\n", - "mpl.find_output_cell = function (html_output) {\n", - " // Return the cell and output element which can be found *uniquely* in the notebook.\n", - " // 
Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n", - " // IPython event is triggered only after the cells have been serialised, which for\n", - " // our purposes (turning an active figure into a static one), is too late.\n", - " var cells = IPython.notebook.get_cells();\n", - " var ncells = cells.length;\n", - " for (var i = 0; i < ncells; i++) {\n", - " var cell = cells[i];\n", - " if (cell.cell_type === 'code') {\n", - " for (var j = 0; j < cell.output_area.outputs.length; j++) {\n", - " var data = cell.output_area.outputs[j];\n", - " if (data.data) {\n", - " // IPython >= 3 moved mimebundle to data attribute of output\n", - " data = data.data;\n", - " }\n", - " if (data['text/html'] === html_output) {\n", - " return [cell, data, j];\n", - " }\n", - " }\n", - " }\n", - " }\n", - "};\n", - "\n", - "// Register the function which deals with the matplotlib target/channel.\n", - "// The kernel may be null if the page has been refreshed.\n", - "if (IPython.notebook.kernel !== null) {\n", - " IPython.notebook.kernel.comm_manager.register_target(\n", - " 'matplotlib',\n", - " mpl.mpl_figure_comm\n", - " );\n", - "}\n" - ], + "application/javascript": "/* Put everything inside the global mpl namespace */\n/* global mpl */\nwindow.mpl = {};\n\nmpl.get_websocket_type = function () {\n if (typeof WebSocket !== 'undefined') {\n return WebSocket;\n } else if (typeof MozWebSocket !== 'undefined') {\n return MozWebSocket;\n } else {\n alert(\n 'Your browser does not have WebSocket support. ' +\n 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n 'Firefox 4 and 5 are also supported but you ' +\n 'have to enable WebSockets in about:config.'\n );\n }\n};\n\nmpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n this.id = figure_id;\n\n this.ws = websocket;\n\n this.supports_binary = this.ws.binaryType !== undefined;\n\n if (!this.supports_binary) {\n var warnings = document.getElementById('mpl-warnings');\n if (warnings) {\n warnings.style.display = 'block';\n warnings.textContent =\n 'This browser does not support binary websocket messages. 
' +\n 'Performance may be slow.';\n }\n }\n\n this.imageObj = new Image();\n\n this.context = undefined;\n this.message = undefined;\n this.canvas = undefined;\n this.rubberband_canvas = undefined;\n this.rubberband_context = undefined;\n this.format_dropdown = undefined;\n\n this.image_mode = 'full';\n\n this.root = document.createElement('div');\n this.root.setAttribute('style', 'display: inline-block');\n this._root_extra_style(this.root);\n\n parent_element.appendChild(this.root);\n\n this._init_header(this);\n this._init_canvas(this);\n this._init_toolbar(this);\n\n var fig = this;\n\n this.waiting = false;\n\n this.ws.onopen = function () {\n fig.send_message('supports_binary', { value: fig.supports_binary });\n fig.send_message('send_image_mode', {});\n if (fig.ratio !== 1) {\n fig.send_message('set_dpi_ratio', { dpi_ratio: fig.ratio });\n }\n fig.send_message('refresh', {});\n };\n\n this.imageObj.onload = function () {\n if (fig.image_mode === 'full') {\n // Full images could contain transparency (where diff images\n // almost always do), so we need to clear the canvas so that\n // there is no ghosting.\n fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n }\n fig.context.drawImage(fig.imageObj, 0, 0);\n };\n\n this.imageObj.onunload = function () {\n fig.ws.close();\n };\n\n this.ws.onmessage = this._make_on_message_function(this);\n\n this.ondownload = ondownload;\n};\n\nmpl.figure.prototype._init_header = function () {\n var titlebar = document.createElement('div');\n titlebar.classList =\n 'ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix';\n var titletext = document.createElement('div');\n titletext.classList = 'ui-dialog-title';\n titletext.setAttribute(\n 'style',\n 'width: 100%; text-align: center; padding: 3px;'\n );\n titlebar.appendChild(titletext);\n this.root.appendChild(titlebar);\n this.header = titletext;\n};\n\nmpl.figure.prototype._canvas_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._root_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._init_canvas = function () {\n var fig = this;\n\n var canvas_div = (this.canvas_div = document.createElement('div'));\n canvas_div.setAttribute(\n 'style',\n 'border: 1px solid #ddd;' +\n 'box-sizing: content-box;' +\n 'clear: both;' +\n 'min-height: 1px;' +\n 'min-width: 1px;' +\n 'outline: 0;' +\n 'overflow: hidden;' +\n 'position: relative;' +\n 'resize: both;'\n );\n\n function on_keyboard_event_closure(name) {\n return function (event) {\n return fig.key_event(event, name);\n };\n }\n\n canvas_div.addEventListener(\n 'keydown',\n on_keyboard_event_closure('key_press')\n );\n canvas_div.addEventListener(\n 'keyup',\n on_keyboard_event_closure('key_release')\n );\n\n this._canvas_extra_style(canvas_div);\n this.root.appendChild(canvas_div);\n\n var canvas = (this.canvas = document.createElement('canvas'));\n canvas.classList.add('mpl-canvas');\n canvas.setAttribute('style', 'box-sizing: content-box;');\n\n this.context = canvas.getContext('2d');\n\n var backingStore =\n this.context.backingStorePixelRatio ||\n this.context.webkitBackingStorePixelRatio ||\n this.context.mozBackingStorePixelRatio ||\n this.context.msBackingStorePixelRatio ||\n this.context.oBackingStorePixelRatio ||\n this.context.backingStorePixelRatio ||\n 1;\n\n this.ratio = (window.devicePixelRatio || 1) / backingStore;\n\n var rubberband_canvas = (this.rubberband_canvas = document.createElement(\n 'canvas'\n ));\n rubberband_canvas.setAttribute(\n 'style',\n 'box-sizing: content-box; 
position: absolute; left: 0; top: 0; z-index: 1;'\n );\n\n // Apply a ponyfill if ResizeObserver is not implemented by browser.\n if (this.ResizeObserver === undefined) {\n if (window.ResizeObserver !== undefined) {\n this.ResizeObserver = window.ResizeObserver;\n } else {\n var obs = _JSXTOOLS_RESIZE_OBSERVER({});\n this.ResizeObserver = obs.ResizeObserver;\n }\n }\n\n this.resizeObserverInstance = new this.ResizeObserver(function (entries) {\n var nentries = entries.length;\n for (var i = 0; i < nentries; i++) {\n var entry = entries[i];\n var width, height;\n if (entry.contentBoxSize) {\n if (entry.contentBoxSize instanceof Array) {\n // Chrome 84 implements new version of spec.\n width = entry.contentBoxSize[0].inlineSize;\n height = entry.contentBoxSize[0].blockSize;\n } else {\n // Firefox implements old version of spec.\n width = entry.contentBoxSize.inlineSize;\n height = entry.contentBoxSize.blockSize;\n }\n } else {\n // Chrome <84 implements even older version of spec.\n width = entry.contentRect.width;\n height = entry.contentRect.height;\n }\n\n // Keep the size of the canvas and rubber band canvas in sync with\n // the canvas container.\n if (entry.devicePixelContentBoxSize) {\n // Chrome 84 implements new version of spec.\n canvas.setAttribute(\n 'width',\n entry.devicePixelContentBoxSize[0].inlineSize\n );\n canvas.setAttribute(\n 'height',\n entry.devicePixelContentBoxSize[0].blockSize\n );\n } else {\n canvas.setAttribute('width', width * fig.ratio);\n canvas.setAttribute('height', height * fig.ratio);\n }\n canvas.setAttribute(\n 'style',\n 'width: ' + width + 'px; height: ' + height + 'px;'\n );\n\n rubberband_canvas.setAttribute('width', width);\n rubberband_canvas.setAttribute('height', height);\n\n // And update the size in Python. 
We ignore the initial 0/0 size\n // that occurs as the element is placed into the DOM, which should\n // otherwise not happen due to the minimum size styling.\n if (fig.ws.readyState == 1 && width != 0 && height != 0) {\n fig.request_resize(width, height);\n }\n }\n });\n this.resizeObserverInstance.observe(canvas_div);\n\n function on_mouse_event_closure(name) {\n return function (event) {\n return fig.mouse_event(event, name);\n };\n }\n\n rubberband_canvas.addEventListener(\n 'mousedown',\n on_mouse_event_closure('button_press')\n );\n rubberband_canvas.addEventListener(\n 'mouseup',\n on_mouse_event_closure('button_release')\n );\n // Throttle sequential mouse events to 1 every 20ms.\n rubberband_canvas.addEventListener(\n 'mousemove',\n on_mouse_event_closure('motion_notify')\n );\n\n rubberband_canvas.addEventListener(\n 'mouseenter',\n on_mouse_event_closure('figure_enter')\n );\n rubberband_canvas.addEventListener(\n 'mouseleave',\n on_mouse_event_closure('figure_leave')\n );\n\n canvas_div.addEventListener('wheel', function (event) {\n if (event.deltaY < 0) {\n event.step = 1;\n } else {\n event.step = -1;\n }\n on_mouse_event_closure('scroll')(event);\n });\n\n canvas_div.appendChild(canvas);\n canvas_div.appendChild(rubberband_canvas);\n\n this.rubberband_context = rubberband_canvas.getContext('2d');\n this.rubberband_context.strokeStyle = '#000000';\n\n this._resize_canvas = function (width, height, forward) {\n if (forward) {\n canvas_div.style.width = width + 'px';\n canvas_div.style.height = height + 'px';\n }\n };\n\n // Disable right mouse context menu.\n this.rubberband_canvas.addEventListener('contextmenu', function (_e) {\n event.preventDefault();\n return false;\n });\n\n function set_focus() {\n canvas.focus();\n canvas_div.focus();\n }\n\n window.setTimeout(set_focus, 100);\n};\n\nmpl.figure.prototype._init_toolbar = function () {\n var fig = this;\n\n var toolbar = document.createElement('div');\n toolbar.classList = 'mpl-toolbar';\n this.root.appendChild(toolbar);\n\n function on_click_closure(name) {\n return function (_event) {\n return fig.toolbar_button_onclick(name);\n };\n }\n\n function on_mouseover_closure(tooltip) {\n return function (event) {\n if (!event.currentTarget.disabled) {\n return fig.toolbar_button_onmouseover(tooltip);\n }\n };\n }\n\n fig.buttons = {};\n var buttonGroup = document.createElement('div');\n buttonGroup.classList = 'mpl-button-group';\n for (var toolbar_ind in mpl.toolbar_items) {\n var name = mpl.toolbar_items[toolbar_ind][0];\n var tooltip = mpl.toolbar_items[toolbar_ind][1];\n var image = mpl.toolbar_items[toolbar_ind][2];\n var method_name = mpl.toolbar_items[toolbar_ind][3];\n\n if (!name) {\n /* Instead of a spacer, we start a new button group. 
*/\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n buttonGroup = document.createElement('div');\n buttonGroup.classList = 'mpl-button-group';\n continue;\n }\n\n var button = (fig.buttons[name] = document.createElement('button'));\n button.classList = 'mpl-widget';\n button.setAttribute('role', 'button');\n button.setAttribute('aria-disabled', 'false');\n button.addEventListener('click', on_click_closure(method_name));\n button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n\n var icon_img = document.createElement('img');\n icon_img.src = '_images/' + image + '.png';\n icon_img.srcset = '_images/' + image + '_large.png 2x';\n icon_img.alt = tooltip;\n button.appendChild(icon_img);\n\n buttonGroup.appendChild(button);\n }\n\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n\n var fmt_picker = document.createElement('select');\n fmt_picker.classList = 'mpl-widget';\n toolbar.appendChild(fmt_picker);\n this.format_dropdown = fmt_picker;\n\n for (var ind in mpl.extensions) {\n var fmt = mpl.extensions[ind];\n var option = document.createElement('option');\n option.selected = fmt === mpl.default_extension;\n option.innerHTML = fmt;\n fmt_picker.appendChild(option);\n }\n\n var status_bar = document.createElement('span');\n status_bar.classList = 'mpl-message';\n toolbar.appendChild(status_bar);\n this.message = status_bar;\n};\n\nmpl.figure.prototype.request_resize = function (x_pixels, y_pixels) {\n // Request matplotlib to resize the figure. Matplotlib will then trigger a resize in the client,\n // which will in turn request a refresh of the image.\n this.send_message('resize', { width: x_pixels, height: y_pixels });\n};\n\nmpl.figure.prototype.send_message = function (type, properties) {\n properties['type'] = type;\n properties['figure_id'] = this.id;\n this.ws.send(JSON.stringify(properties));\n};\n\nmpl.figure.prototype.send_draw_message = function () {\n if (!this.waiting) {\n this.waiting = true;\n this.ws.send(JSON.stringify({ type: 'draw', figure_id: this.id }));\n }\n};\n\nmpl.figure.prototype.handle_save = function (fig, _msg) {\n var format_dropdown = fig.format_dropdown;\n var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n fig.ondownload(fig, format);\n};\n\nmpl.figure.prototype.handle_resize = function (fig, msg) {\n var size = msg['size'];\n if (size[0] !== fig.canvas.width || size[1] !== fig.canvas.height) {\n fig._resize_canvas(size[0], size[1], msg['forward']);\n fig.send_message('refresh', {});\n }\n};\n\nmpl.figure.prototype.handle_rubberband = function (fig, msg) {\n var x0 = msg['x0'] / fig.ratio;\n var y0 = (fig.canvas.height - msg['y0']) / fig.ratio;\n var x1 = msg['x1'] / fig.ratio;\n var y1 = (fig.canvas.height - msg['y1']) / fig.ratio;\n x0 = Math.floor(x0) + 0.5;\n y0 = Math.floor(y0) + 0.5;\n x1 = Math.floor(x1) + 0.5;\n y1 = Math.floor(y1) + 0.5;\n var min_x = Math.min(x0, x1);\n var min_y = Math.min(y0, y1);\n var width = Math.abs(x1 - x0);\n var height = Math.abs(y1 - y0);\n\n fig.rubberband_context.clearRect(\n 0,\n 0,\n fig.canvas.width / fig.ratio,\n fig.canvas.height / fig.ratio\n );\n\n fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n};\n\nmpl.figure.prototype.handle_figure_label = function (fig, msg) {\n // Updates the figure title.\n fig.header.textContent = msg['label'];\n};\n\nmpl.figure.prototype.handle_cursor = function (fig, msg) {\n var cursor = msg['cursor'];\n switch (cursor) {\n case 0:\n cursor = 'pointer';\n break;\n case 1:\n 
cursor = 'default';\n break;\n case 2:\n cursor = 'crosshair';\n break;\n case 3:\n cursor = 'move';\n break;\n }\n fig.rubberband_canvas.style.cursor = cursor;\n};\n\nmpl.figure.prototype.handle_message = function (fig, msg) {\n fig.message.textContent = msg['message'];\n};\n\nmpl.figure.prototype.handle_draw = function (fig, _msg) {\n // Request the server to send over a new figure.\n fig.send_draw_message();\n};\n\nmpl.figure.prototype.handle_image_mode = function (fig, msg) {\n fig.image_mode = msg['mode'];\n};\n\nmpl.figure.prototype.handle_history_buttons = function (fig, msg) {\n for (var key in msg) {\n if (!(key in fig.buttons)) {\n continue;\n }\n fig.buttons[key].disabled = !msg[key];\n fig.buttons[key].setAttribute('aria-disabled', !msg[key]);\n }\n};\n\nmpl.figure.prototype.handle_navigate_mode = function (fig, msg) {\n if (msg['mode'] === 'PAN') {\n fig.buttons['Pan'].classList.add('active');\n fig.buttons['Zoom'].classList.remove('active');\n } else if (msg['mode'] === 'ZOOM') {\n fig.buttons['Pan'].classList.remove('active');\n fig.buttons['Zoom'].classList.add('active');\n } else {\n fig.buttons['Pan'].classList.remove('active');\n fig.buttons['Zoom'].classList.remove('active');\n }\n};\n\nmpl.figure.prototype.updated_canvas_event = function () {\n // Called whenever the canvas gets updated.\n this.send_message('ack', {});\n};\n\n// A function to construct a web socket function for onmessage handling.\n// Called in the figure constructor.\nmpl.figure.prototype._make_on_message_function = function (fig) {\n return function socket_on_message(evt) {\n if (evt.data instanceof Blob) {\n /* FIXME: We get \"Resource interpreted as Image but\n * transferred with MIME type text/plain:\" errors on\n * Chrome. But how to set the MIME type? It doesn't seem\n * to be part of the websocket stream */\n evt.data.type = 'image/png';\n\n /* Free the memory for the previous frames */\n if (fig.imageObj.src) {\n (window.URL || window.webkitURL).revokeObjectURL(\n fig.imageObj.src\n );\n }\n\n fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n evt.data\n );\n fig.updated_canvas_event();\n fig.waiting = false;\n return;\n } else if (\n typeof evt.data === 'string' &&\n evt.data.slice(0, 21) === 'data:image/png;base64'\n ) {\n fig.imageObj.src = evt.data;\n fig.updated_canvas_event();\n fig.waiting = false;\n return;\n }\n\n var msg = JSON.parse(evt.data);\n var msg_type = msg['type'];\n\n // Call the \"handle_{type}\" callback, which takes\n // the figure and JSON message as its only arguments.\n try {\n var callback = fig['handle_' + msg_type];\n } catch (e) {\n console.log(\n \"No handler for the '\" + msg_type + \"' message type: \",\n msg\n );\n return;\n }\n\n if (callback) {\n try {\n // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n callback(fig, msg);\n } catch (e) {\n console.log(\n \"Exception inside the 'handler_\" + msg_type + \"' callback:\",\n e,\n e.stack,\n msg\n );\n }\n }\n };\n};\n\n// from http://stackoverflow.com/questions/1114465/getting-mouse-location-in-canvas\nmpl.findpos = function (e) {\n //this section is from http://www.quirksmode.org/js/events_properties.html\n var targ;\n if (!e) {\n e = window.event;\n }\n if (e.target) {\n targ = e.target;\n } else if (e.srcElement) {\n targ = e.srcElement;\n }\n if (targ.nodeType === 3) {\n // defeat Safari bug\n targ = targ.parentNode;\n }\n\n // pageX,Y are the mouse positions relative to the document\n var boundingRect = targ.getBoundingClientRect();\n var x = e.pageX - 
(boundingRect.left + document.body.scrollLeft);\n var y = e.pageY - (boundingRect.top + document.body.scrollTop);\n\n return { x: x, y: y };\n};\n\n/*\n * return a copy of an object with only non-object keys\n * we need this to avoid circular references\n * http://stackoverflow.com/a/24161582/3208463\n */\nfunction simpleKeys(original) {\n return Object.keys(original).reduce(function (obj, key) {\n if (typeof original[key] !== 'object') {\n obj[key] = original[key];\n }\n return obj;\n }, {});\n}\n\nmpl.figure.prototype.mouse_event = function (event, name) {\n var canvas_pos = mpl.findpos(event);\n\n if (name === 'button_press') {\n this.canvas.focus();\n this.canvas_div.focus();\n }\n\n var x = canvas_pos.x * this.ratio;\n var y = canvas_pos.y * this.ratio;\n\n this.send_message(name, {\n x: x,\n y: y,\n button: event.button,\n step: event.step,\n guiEvent: simpleKeys(event),\n });\n\n /* This prevents the web browser from automatically changing to\n * the text insertion cursor when the button is pressed. We want\n * to control all of the cursor setting manually through the\n * 'cursor' event from matplotlib */\n event.preventDefault();\n return false;\n};\n\nmpl.figure.prototype._key_event_extra = function (_event, _name) {\n // Handle any extra behaviour associated with a key event\n};\n\nmpl.figure.prototype.key_event = function (event, name) {\n // Prevent repeat events\n if (name === 'key_press') {\n if (event.which === this._key) {\n return;\n } else {\n this._key = event.which;\n }\n }\n if (name === 'key_release') {\n this._key = null;\n }\n\n var value = '';\n if (event.ctrlKey && event.which !== 17) {\n value += 'ctrl+';\n }\n if (event.altKey && event.which !== 18) {\n value += 'alt+';\n }\n if (event.shiftKey && event.which !== 16) {\n value += 'shift+';\n }\n\n value += 'k';\n value += event.which.toString();\n\n this._key_event_extra(event, name);\n\n this.send_message(name, { key: value, guiEvent: simpleKeys(event) });\n return false;\n};\n\nmpl.figure.prototype.toolbar_button_onclick = function (name) {\n if (name === 'download') {\n this.handle_save(this, null);\n } else {\n this.send_message('toolbar_button', { name: name });\n }\n};\n\nmpl.figure.prototype.toolbar_button_onmouseover = function (tooltip) {\n this.message.textContent = tooltip;\n};\n\n///////////////// REMAINING CONTENT GENERATED BY embed_js.py /////////////////\n// prettier-ignore\nvar _JSXTOOLS_RESIZE_OBSERVER=function(A){var t,i=new WeakMap,n=new WeakMap,a=new WeakMap,r=new WeakMap,o=new Set;function s(e){if(!(this instanceof s))throw new TypeError(\"Constructor requires 'new' operator\");i.set(this,e)}function h(){throw new TypeError(\"Function is not a constructor\")}function c(e,t,i,n){e=0 in arguments?Number(arguments[0]):0,t=1 in arguments?Number(arguments[1]):0,i=2 in arguments?Number(arguments[2]):0,n=3 in arguments?Number(arguments[3]):0,this.right=(this.x=this.left=e)+(this.width=i),this.bottom=(this.y=this.top=t)+(this.height=n),Object.freeze(this)}function d(){t=requestAnimationFrame(d);var s=new WeakMap,p=new Set;o.forEach((function(t){r.get(t).forEach((function(i){var r=t instanceof 
window.SVGElement,o=a.get(t),d=r?0:parseFloat(o.paddingTop),f=r?0:parseFloat(o.paddingRight),l=r?0:parseFloat(o.paddingBottom),u=r?0:parseFloat(o.paddingLeft),g=r?0:parseFloat(o.borderTopWidth),m=r?0:parseFloat(o.borderRightWidth),w=r?0:parseFloat(o.borderBottomWidth),b=u+f,F=d+l,v=(r?0:parseFloat(o.borderLeftWidth))+m,W=g+w,y=r?0:t.offsetHeight-W-t.clientHeight,E=r?0:t.offsetWidth-v-t.clientWidth,R=b+v,z=F+W,M=r?t.width:parseFloat(o.width)-R-E,O=r?t.height:parseFloat(o.height)-z-y;if(n.has(t)){var k=n.get(t);if(k[0]===M&&k[1]===O)return}n.set(t,[M,O]);var S=Object.create(h.prototype);S.target=t,S.contentRect=new c(u,d,M,O),s.has(i)||(s.set(i,[]),p.add(i)),s.get(i).push(S)}))})),p.forEach((function(e){i.get(e).call(e,s.get(e),e)}))}return s.prototype.observe=function(i){if(i instanceof window.Element){r.has(i)||(r.set(i,new Set),o.add(i),a.set(i,window.getComputedStyle(i)));var n=r.get(i);n.has(this)||n.add(this),cancelAnimationFrame(t),t=requestAnimationFrame(d)}},s.prototype.unobserve=function(i){if(i instanceof window.Element&&r.has(i)){var n=r.get(i);n.has(this)&&(n.delete(this),n.size||(r.delete(i),o.delete(i))),n.size||r.delete(i),o.size||cancelAnimationFrame(t)}},A.DOMRectReadOnly=c,A.ResizeObserver=s,A.ResizeObserverEntry=h,A}; // eslint-disable-line\nmpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home icon-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left icon-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right icon-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Left button pans, Right button zooms\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-arrows icon-move\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-square-o icon-check-empty\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o icon-save\", \"download\"]];\n\nmpl.extensions = [\"eps\", \"jpeg\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\"];\n\nmpl.default_extension = \"png\";/* global mpl */\n\nvar comm_websocket_adapter = function (comm) {\n // Create a \"websocket\"-like object which calls the given IPython comm\n // object with the appropriate methods. Currently this is a non binary\n // socket, so there is still some room for performance tuning.\n var ws = {};\n\n ws.close = function () {\n comm.close();\n };\n ws.send = function (m) {\n //console.log('sending', m);\n comm.send(m);\n };\n // Register the callback with on_msg.\n comm.on_msg(function (msg) {\n //console.log('receiving', msg['content']['data'], msg);\n // Pass the mpl event to the overridden (by mpl) onmessage function.\n ws.onmessage(msg['content']['data']);\n });\n return ws;\n};\n\nmpl.mpl_figure_comm = function (comm, msg) {\n // This is the function which gets called when the mpl process\n // starts-up an IPython Comm through the \"matplotlib\" channel.\n\n var id = msg.content.data.id;\n // Get hold of the div created by the display call when the Comm\n // socket was opened in Python.\n var element = document.getElementById(id);\n var ws_proxy = comm_websocket_adapter(comm);\n\n function ondownload(figure, _format) {\n window.open(figure.canvas.toDataURL());\n }\n\n var fig = new mpl.figure(id, ws_proxy, ondownload, element);\n\n // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n // web socket which is closed, not our websocket->open comm proxy.\n ws_proxy.onopen();\n\n fig.parent_element = element;\n fig.cell_info = mpl.find_output_cell(\"
\");\n if (!fig.cell_info) {\n console.error('Failed to find cell for figure', id, fig);\n return;\n }\n fig.cell_info[0].output_area.element.on(\n 'cleared',\n { fig: fig },\n fig._remove_fig_handler\n );\n};\n\nmpl.figure.prototype.handle_close = function (fig, msg) {\n var width = fig.canvas.width / fig.ratio;\n fig.cell_info[0].output_area.element.off(\n 'cleared',\n fig._remove_fig_handler\n );\n fig.resizeObserverInstance.unobserve(fig.canvas_div);\n\n // Update the output cell to use the data from the current canvas.\n fig.push_to_output();\n var dataURL = fig.canvas.toDataURL();\n // Re-enable the keyboard manager in IPython - without this line, in FF,\n // the notebook keyboard shortcuts fail.\n IPython.keyboard_manager.enable();\n fig.parent_element.innerHTML =\n '';\n fig.close_ws(fig, msg);\n};\n\nmpl.figure.prototype.close_ws = function (fig, msg) {\n fig.send_message('closing', msg);\n // fig.ws.close()\n};\n\nmpl.figure.prototype.push_to_output = function (_remove_interactive) {\n // Turn the data on the canvas into data in the output cell.\n var width = this.canvas.width / this.ratio;\n var dataURL = this.canvas.toDataURL();\n this.cell_info[1]['text/html'] =\n '';\n};\n\nmpl.figure.prototype.updated_canvas_event = function () {\n // Tell IPython that the notebook contents must change.\n IPython.notebook.set_dirty(true);\n this.send_message('ack', {});\n var fig = this;\n // Wait a second, then push the new image to the DOM so\n // that it is saved nicely (might be nice to debounce this).\n setTimeout(function () {\n fig.push_to_output();\n }, 1000);\n};\n\nmpl.figure.prototype._init_toolbar = function () {\n var fig = this;\n\n var toolbar = document.createElement('div');\n toolbar.classList = 'btn-toolbar';\n this.root.appendChild(toolbar);\n\n function on_click_closure(name) {\n return function (_event) {\n return fig.toolbar_button_onclick(name);\n };\n }\n\n function on_mouseover_closure(tooltip) {\n return function (event) {\n if (!event.currentTarget.disabled) {\n return fig.toolbar_button_onmouseover(tooltip);\n }\n };\n }\n\n fig.buttons = {};\n var buttonGroup = document.createElement('div');\n buttonGroup.classList = 'btn-group';\n var button;\n for (var toolbar_ind in mpl.toolbar_items) {\n var name = mpl.toolbar_items[toolbar_ind][0];\n var tooltip = mpl.toolbar_items[toolbar_ind][1];\n var image = mpl.toolbar_items[toolbar_ind][2];\n var method_name = mpl.toolbar_items[toolbar_ind][3];\n\n if (!name) {\n /* Instead of a spacer, we start a new button group. 
*/\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n buttonGroup = document.createElement('div');\n buttonGroup.classList = 'btn-group';\n continue;\n }\n\n button = fig.buttons[name] = document.createElement('button');\n button.classList = 'btn btn-default';\n button.href = '#';\n button.title = name;\n button.innerHTML = '';\n button.addEventListener('click', on_click_closure(method_name));\n button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n buttonGroup.appendChild(button);\n }\n\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n\n // Add the status bar.\n var status_bar = document.createElement('span');\n status_bar.classList = 'mpl-message pull-right';\n toolbar.appendChild(status_bar);\n this.message = status_bar;\n\n // Add the close button to the window.\n var buttongrp = document.createElement('div');\n buttongrp.classList = 'btn-group inline pull-right';\n button = document.createElement('button');\n button.classList = 'btn btn-mini btn-primary';\n button.href = '#';\n button.title = 'Stop Interaction';\n button.innerHTML = '';\n button.addEventListener('click', function (_evt) {\n fig.handle_close(fig, {});\n });\n button.addEventListener(\n 'mouseover',\n on_mouseover_closure('Stop Interaction')\n );\n buttongrp.appendChild(button);\n var titlebar = this.root.querySelector('.ui-dialog-titlebar');\n titlebar.insertBefore(buttongrp, titlebar.firstChild);\n};\n\nmpl.figure.prototype._remove_fig_handler = function (event) {\n var fig = event.data.fig;\n if (event.target !== this) {\n // Ignore bubbled events from children.\n return;\n }\n fig.close_ws(fig, {});\n};\n\nmpl.figure.prototype._root_extra_style = function (el) {\n el.style.boxSizing = 'content-box'; // override notebook setting of border-box.\n};\n\nmpl.figure.prototype._canvas_extra_style = function (el) {\n // this is important to make the div 'focusable\n el.setAttribute('tabindex', 0);\n // reach out to IPython and tell the keyboard manager to turn it's self\n // off when our div gets focus\n\n // location in version 3\n if (IPython.notebook.keyboard_manager) {\n IPython.notebook.keyboard_manager.register_events(el);\n } else {\n // location in version 2\n IPython.keyboard_manager.register_events(el);\n }\n};\n\nmpl.figure.prototype._key_event_extra = function (event, _name) {\n var manager = IPython.notebook.keyboard_manager;\n if (!manager) {\n manager = IPython.keyboard_manager;\n }\n\n // Check for shift+enter\n if (event.shiftKey && event.which === 13) {\n this.canvas_div.blur();\n // select the cell after this one\n var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n IPython.notebook.select(index + 1);\n }\n};\n\nmpl.figure.prototype.handle_save = function (fig, _msg) {\n fig.ondownload(fig, null);\n};\n\nmpl.find_output_cell = function (html_output) {\n // Return the cell and output element which can be found *uniquely* in the notebook.\n // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n // IPython event is triggered only after the cells have been serialised, which for\n // our purposes (turning an active figure into a static one), is too late.\n var cells = IPython.notebook.get_cells();\n var ncells = cells.length;\n for (var i = 0; i < ncells; i++) {\n var cell = cells[i];\n if (cell.cell_type === 'code') {\n for (var j = 0; j < cell.output_area.outputs.length; j++) {\n var data = cell.output_area.outputs[j];\n if (data.data) {\n // IPython >= 3 moved mimebundle to data 
attribute of output\n data = data.data;\n }\n if (data['text/html'] === html_output) {\n return [cell, data, j];\n }\n }\n }\n }\n};\n\n// Register the function which deals with the matplotlib target/channel.\n// The kernel may be null if the page has been refreshed.\nif (IPython.notebook.kernel !== null) {\n IPython.notebook.kernel.comm_manager.register_target(\n 'matplotlib',\n mpl.mpl_figure_comm\n );\n}\n", "text/plain": [ "" ] @@ -1293,955 +345,7 @@ "outputs": [ { "data": { - "application/javascript": [ - "/* Put everything inside the global mpl namespace */\n", - "/* global mpl */\n", - "window.mpl = {};\n", - "\n", - "mpl.get_websocket_type = function () {\n", - " if (typeof WebSocket !== 'undefined') {\n", - " return WebSocket;\n", - " } else if (typeof MozWebSocket !== 'undefined') {\n", - " return MozWebSocket;\n", - " } else {\n", - " alert(\n", - " 'Your browser does not have WebSocket support. ' +\n", - " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n", - " 'Firefox 4 and 5 are also supported but you ' +\n", - " 'have to enable WebSockets in about:config.'\n", - " );\n", - " }\n", - "};\n", - "\n", - "mpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n", - " this.id = figure_id;\n", - "\n", - " this.ws = websocket;\n", - "\n", - " this.supports_binary = this.ws.binaryType !== undefined;\n", - "\n", - " if (!this.supports_binary) {\n", - " var warnings = document.getElementById('mpl-warnings');\n", - " if (warnings) {\n", - " warnings.style.display = 'block';\n", - " warnings.textContent =\n", - " 'This browser does not support binary websocket messages. ' +\n", - " 'Performance may be slow.';\n", - " }\n", - " }\n", - "\n", - " this.imageObj = new Image();\n", - "\n", - " this.context = undefined;\n", - " this.message = undefined;\n", - " this.canvas = undefined;\n", - " this.rubberband_canvas = undefined;\n", - " this.rubberband_context = undefined;\n", - " this.format_dropdown = undefined;\n", - "\n", - " this.image_mode = 'full';\n", - "\n", - " this.root = document.createElement('div');\n", - " this.root.setAttribute('style', 'display: inline-block');\n", - " this._root_extra_style(this.root);\n", - "\n", - " parent_element.appendChild(this.root);\n", - "\n", - " this._init_header(this);\n", - " this._init_canvas(this);\n", - " this._init_toolbar(this);\n", - "\n", - " var fig = this;\n", - "\n", - " this.waiting = false;\n", - "\n", - " this.ws.onopen = function () {\n", - " fig.send_message('supports_binary', { value: fig.supports_binary });\n", - " fig.send_message('send_image_mode', {});\n", - " if (fig.ratio !== 1) {\n", - " fig.send_message('set_dpi_ratio', { dpi_ratio: fig.ratio });\n", - " }\n", - " fig.send_message('refresh', {});\n", - " };\n", - "\n", - " this.imageObj.onload = function () {\n", - " if (fig.image_mode === 'full') {\n", - " // Full images could contain transparency (where diff images\n", - " // almost always do), so we need to clear the canvas so that\n", - " // there is no ghosting.\n", - " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", - " }\n", - " fig.context.drawImage(fig.imageObj, 0, 0);\n", - " };\n", - "\n", - " this.imageObj.onunload = function () {\n", - " fig.ws.close();\n", - " };\n", - "\n", - " this.ws.onmessage = this._make_on_message_function(this);\n", - "\n", - " this.ondownload = ondownload;\n", - "};\n", - "\n", - "mpl.figure.prototype._init_header = function () {\n", - " var titlebar = document.createElement('div');\n", - " titlebar.classList =\n", - " 'ui-dialog-titlebar 
-      ],
+      "application/javascript": "[... the same matplotlib nbagg JavaScript re-embedded as a single JSON string ...]
attribute of output\n data = data.data;\n }\n if (data['text/html'] === html_output) {\n return [cell, data, j];\n }\n }\n }\n }\n};\n\n// Register the function which deals with the matplotlib target/channel.\n// The kernel may be null if the page has been refreshed.\nif (IPython.notebook.kernel !== null) {\n IPython.notebook.kernel.comm_manager.register_target(\n 'matplotlib',\n mpl.mpl_figure_comm\n );\n}\n", "text/plain": [ "" ] @@ -2310,955 +414,7 @@ "outputs": [ { "data": { - "application/javascript": [ - "/* Put everything inside the global mpl namespace */\n", - "/* global mpl */\n", - "window.mpl = {};\n", - "\n", - "mpl.get_websocket_type = function () {\n", - " if (typeof WebSocket !== 'undefined') {\n", - " return WebSocket;\n", - " } else if (typeof MozWebSocket !== 'undefined') {\n", - " return MozWebSocket;\n", - " } else {\n", - " alert(\n", - " 'Your browser does not have WebSocket support. ' +\n", - " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n", - " 'Firefox 4 and 5 are also supported but you ' +\n", - " 'have to enable WebSockets in about:config.'\n", - " );\n", - " }\n", - "};\n", - "\n", - "mpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n", - " this.id = figure_id;\n", - "\n", - " this.ws = websocket;\n", - "\n", - " this.supports_binary = this.ws.binaryType !== undefined;\n", - "\n", - " if (!this.supports_binary) {\n", - " var warnings = document.getElementById('mpl-warnings');\n", - " if (warnings) {\n", - " warnings.style.display = 'block';\n", - " warnings.textContent =\n", - " 'This browser does not support binary websocket messages. ' +\n", - " 'Performance may be slow.';\n", - " }\n", - " }\n", - "\n", - " this.imageObj = new Image();\n", - "\n", - " this.context = undefined;\n", - " this.message = undefined;\n", - " this.canvas = undefined;\n", - " this.rubberband_canvas = undefined;\n", - " this.rubberband_context = undefined;\n", - " this.format_dropdown = undefined;\n", - "\n", - " this.image_mode = 'full';\n", - "\n", - " this.root = document.createElement('div');\n", - " this.root.setAttribute('style', 'display: inline-block');\n", - " this._root_extra_style(this.root);\n", - "\n", - " parent_element.appendChild(this.root);\n", - "\n", - " this._init_header(this);\n", - " this._init_canvas(this);\n", - " this._init_toolbar(this);\n", - "\n", - " var fig = this;\n", - "\n", - " this.waiting = false;\n", - "\n", - " this.ws.onopen = function () {\n", - " fig.send_message('supports_binary', { value: fig.supports_binary });\n", - " fig.send_message('send_image_mode', {});\n", - " if (fig.ratio !== 1) {\n", - " fig.send_message('set_dpi_ratio', { dpi_ratio: fig.ratio });\n", - " }\n", - " fig.send_message('refresh', {});\n", - " };\n", - "\n", - " this.imageObj.onload = function () {\n", - " if (fig.image_mode === 'full') {\n", - " // Full images could contain transparency (where diff images\n", - " // almost always do), so we need to clear the canvas so that\n", - " // there is no ghosting.\n", - " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", - " }\n", - " fig.context.drawImage(fig.imageObj, 0, 0);\n", - " };\n", - "\n", - " this.imageObj.onunload = function () {\n", - " fig.ws.close();\n", - " };\n", - "\n", - " this.ws.onmessage = this._make_on_message_function(this);\n", - "\n", - " this.ondownload = ondownload;\n", - "};\n", - "\n", - "mpl.figure.prototype._init_header = function () {\n", - " var titlebar = document.createElement('div');\n", - " titlebar.classList =\n", - " 'ui-dialog-titlebar 
ui-widget-header ui-corner-all ui-helper-clearfix';\n", - " var titletext = document.createElement('div');\n", - " titletext.classList = 'ui-dialog-title';\n", - " titletext.setAttribute(\n", - " 'style',\n", - " 'width: 100%; text-align: center; padding: 3px;'\n", - " );\n", - " titlebar.appendChild(titletext);\n", - " this.root.appendChild(titlebar);\n", - " this.header = titletext;\n", - "};\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function (_canvas_div) {};\n", - "\n", - "mpl.figure.prototype._root_extra_style = function (_canvas_div) {};\n", - "\n", - "mpl.figure.prototype._init_canvas = function () {\n", - " var fig = this;\n", - "\n", - " var canvas_div = (this.canvas_div = document.createElement('div'));\n", - " canvas_div.setAttribute(\n", - " 'style',\n", - " 'border: 1px solid #ddd;' +\n", - " 'box-sizing: content-box;' +\n", - " 'clear: both;' +\n", - " 'min-height: 1px;' +\n", - " 'min-width: 1px;' +\n", - " 'outline: 0;' +\n", - " 'overflow: hidden;' +\n", - " 'position: relative;' +\n", - " 'resize: both;'\n", - " );\n", - "\n", - " function on_keyboard_event_closure(name) {\n", - " return function (event) {\n", - " return fig.key_event(event, name);\n", - " };\n", - " }\n", - "\n", - " canvas_div.addEventListener(\n", - " 'keydown',\n", - " on_keyboard_event_closure('key_press')\n", - " );\n", - " canvas_div.addEventListener(\n", - " 'keyup',\n", - " on_keyboard_event_closure('key_release')\n", - " );\n", - "\n", - " this._canvas_extra_style(canvas_div);\n", - " this.root.appendChild(canvas_div);\n", - "\n", - " var canvas = (this.canvas = document.createElement('canvas'));\n", - " canvas.classList.add('mpl-canvas');\n", - " canvas.setAttribute('style', 'box-sizing: content-box;');\n", - "\n", - " this.context = canvas.getContext('2d');\n", - "\n", - " var backingStore =\n", - " this.context.backingStorePixelRatio ||\n", - " this.context.webkitBackingStorePixelRatio ||\n", - " this.context.mozBackingStorePixelRatio ||\n", - " this.context.msBackingStorePixelRatio ||\n", - " this.context.oBackingStorePixelRatio ||\n", - " this.context.backingStorePixelRatio ||\n", - " 1;\n", - "\n", - " this.ratio = (window.devicePixelRatio || 1) / backingStore;\n", - "\n", - " var rubberband_canvas = (this.rubberband_canvas = document.createElement(\n", - " 'canvas'\n", - " ));\n", - " rubberband_canvas.setAttribute(\n", - " 'style',\n", - " 'box-sizing: content-box; position: absolute; left: 0; top: 0; z-index: 1;'\n", - " );\n", - "\n", - " // Apply a ponyfill if ResizeObserver is not implemented by browser.\n", - " if (this.ResizeObserver === undefined) {\n", - " if (window.ResizeObserver !== undefined) {\n", - " this.ResizeObserver = window.ResizeObserver;\n", - " } else {\n", - " var obs = _JSXTOOLS_RESIZE_OBSERVER({});\n", - " this.ResizeObserver = obs.ResizeObserver;\n", - " }\n", - " }\n", - "\n", - " this.resizeObserverInstance = new this.ResizeObserver(function (entries) {\n", - " var nentries = entries.length;\n", - " for (var i = 0; i < nentries; i++) {\n", - " var entry = entries[i];\n", - " var width, height;\n", - " if (entry.contentBoxSize) {\n", - " if (entry.contentBoxSize instanceof Array) {\n", - " // Chrome 84 implements new version of spec.\n", - " width = entry.contentBoxSize[0].inlineSize;\n", - " height = entry.contentBoxSize[0].blockSize;\n", - " } else {\n", - " // Firefox implements old version of spec.\n", - " width = entry.contentBoxSize.inlineSize;\n", - " height = entry.contentBoxSize.blockSize;\n", - " }\n", - " } else {\n", - " // Chrome 
<84 implements even older version of spec.\n", - " width = entry.contentRect.width;\n", - " height = entry.contentRect.height;\n", - " }\n", - "\n", - " // Keep the size of the canvas and rubber band canvas in sync with\n", - " // the canvas container.\n", - " if (entry.devicePixelContentBoxSize) {\n", - " // Chrome 84 implements new version of spec.\n", - " canvas.setAttribute(\n", - " 'width',\n", - " entry.devicePixelContentBoxSize[0].inlineSize\n", - " );\n", - " canvas.setAttribute(\n", - " 'height',\n", - " entry.devicePixelContentBoxSize[0].blockSize\n", - " );\n", - " } else {\n", - " canvas.setAttribute('width', width * fig.ratio);\n", - " canvas.setAttribute('height', height * fig.ratio);\n", - " }\n", - " canvas.setAttribute(\n", - " 'style',\n", - " 'width: ' + width + 'px; height: ' + height + 'px;'\n", - " );\n", - "\n", - " rubberband_canvas.setAttribute('width', width);\n", - " rubberband_canvas.setAttribute('height', height);\n", - "\n", - " // And update the size in Python. We ignore the initial 0/0 size\n", - " // that occurs as the element is placed into the DOM, which should\n", - " // otherwise not happen due to the minimum size styling.\n", - " if (fig.ws.readyState == 1 && width != 0 && height != 0) {\n", - " fig.request_resize(width, height);\n", - " }\n", - " }\n", - " });\n", - " this.resizeObserverInstance.observe(canvas_div);\n", - "\n", - " function on_mouse_event_closure(name) {\n", - " return function (event) {\n", - " return fig.mouse_event(event, name);\n", - " };\n", - " }\n", - "\n", - " rubberband_canvas.addEventListener(\n", - " 'mousedown',\n", - " on_mouse_event_closure('button_press')\n", - " );\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseup',\n", - " on_mouse_event_closure('button_release')\n", - " );\n", - " // Throttle sequential mouse events to 1 every 20ms.\n", - " rubberband_canvas.addEventListener(\n", - " 'mousemove',\n", - " on_mouse_event_closure('motion_notify')\n", - " );\n", - "\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseenter',\n", - " on_mouse_event_closure('figure_enter')\n", - " );\n", - " rubberband_canvas.addEventListener(\n", - " 'mouseleave',\n", - " on_mouse_event_closure('figure_leave')\n", - " );\n", - "\n", - " canvas_div.addEventListener('wheel', function (event) {\n", - " if (event.deltaY < 0) {\n", - " event.step = 1;\n", - " } else {\n", - " event.step = -1;\n", - " }\n", - " on_mouse_event_closure('scroll')(event);\n", - " });\n", - "\n", - " canvas_div.appendChild(canvas);\n", - " canvas_div.appendChild(rubberband_canvas);\n", - "\n", - " this.rubberband_context = rubberband_canvas.getContext('2d');\n", - " this.rubberband_context.strokeStyle = '#000000';\n", - "\n", - " this._resize_canvas = function (width, height, forward) {\n", - " if (forward) {\n", - " canvas_div.style.width = width + 'px';\n", - " canvas_div.style.height = height + 'px';\n", - " }\n", - " };\n", - "\n", - " // Disable right mouse context menu.\n", - " this.rubberband_canvas.addEventListener('contextmenu', function (_e) {\n", - " event.preventDefault();\n", - " return false;\n", - " });\n", - "\n", - " function set_focus() {\n", - " canvas.focus();\n", - " canvas_div.focus();\n", - " }\n", - "\n", - " window.setTimeout(set_focus, 100);\n", - "};\n", - "\n", - "mpl.figure.prototype._init_toolbar = function () {\n", - " var fig = this;\n", - "\n", - " var toolbar = document.createElement('div');\n", - " toolbar.classList = 'mpl-toolbar';\n", - " this.root.appendChild(toolbar);\n", - "\n", - " function 
on_click_closure(name) {\n", - " return function (_event) {\n", - " return fig.toolbar_button_onclick(name);\n", - " };\n", - " }\n", - "\n", - " function on_mouseover_closure(tooltip) {\n", - " return function (event) {\n", - " if (!event.currentTarget.disabled) {\n", - " return fig.toolbar_button_onmouseover(tooltip);\n", - " }\n", - " };\n", - " }\n", - "\n", - " fig.buttons = {};\n", - " var buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'mpl-button-group';\n", - " for (var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " /* Instead of a spacer, we start a new button group. */\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - " buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'mpl-button-group';\n", - " continue;\n", - " }\n", - "\n", - " var button = (fig.buttons[name] = document.createElement('button'));\n", - " button.classList = 'mpl-widget';\n", - " button.setAttribute('role', 'button');\n", - " button.setAttribute('aria-disabled', 'false');\n", - " button.addEventListener('click', on_click_closure(method_name));\n", - " button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n", - "\n", - " var icon_img = document.createElement('img');\n", - " icon_img.src = '_images/' + image + '.png';\n", - " icon_img.srcset = '_images/' + image + '_large.png 2x';\n", - " icon_img.alt = tooltip;\n", - " button.appendChild(icon_img);\n", - "\n", - " buttonGroup.appendChild(button);\n", - " }\n", - "\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - "\n", - " var fmt_picker = document.createElement('select');\n", - " fmt_picker.classList = 'mpl-widget';\n", - " toolbar.appendChild(fmt_picker);\n", - " this.format_dropdown = fmt_picker;\n", - "\n", - " for (var ind in mpl.extensions) {\n", - " var fmt = mpl.extensions[ind];\n", - " var option = document.createElement('option');\n", - " option.selected = fmt === mpl.default_extension;\n", - " option.innerHTML = fmt;\n", - " fmt_picker.appendChild(option);\n", - " }\n", - "\n", - " var status_bar = document.createElement('span');\n", - " status_bar.classList = 'mpl-message';\n", - " toolbar.appendChild(status_bar);\n", - " this.message = status_bar;\n", - "};\n", - "\n", - "mpl.figure.prototype.request_resize = function (x_pixels, y_pixels) {\n", - " // Request matplotlib to resize the figure. 
Matplotlib will then trigger a resize in the client,\n", - " // which will in turn request a refresh of the image.\n", - " this.send_message('resize', { width: x_pixels, height: y_pixels });\n", - "};\n", - "\n", - "mpl.figure.prototype.send_message = function (type, properties) {\n", - " properties['type'] = type;\n", - " properties['figure_id'] = this.id;\n", - " this.ws.send(JSON.stringify(properties));\n", - "};\n", - "\n", - "mpl.figure.prototype.send_draw_message = function () {\n", - " if (!this.waiting) {\n", - " this.waiting = true;\n", - " this.ws.send(JSON.stringify({ type: 'draw', figure_id: this.id }));\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_save = function (fig, _msg) {\n", - " var format_dropdown = fig.format_dropdown;\n", - " var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n", - " fig.ondownload(fig, format);\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_resize = function (fig, msg) {\n", - " var size = msg['size'];\n", - " if (size[0] !== fig.canvas.width || size[1] !== fig.canvas.height) {\n", - " fig._resize_canvas(size[0], size[1], msg['forward']);\n", - " fig.send_message('refresh', {});\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_rubberband = function (fig, msg) {\n", - " var x0 = msg['x0'] / fig.ratio;\n", - " var y0 = (fig.canvas.height - msg['y0']) / fig.ratio;\n", - " var x1 = msg['x1'] / fig.ratio;\n", - " var y1 = (fig.canvas.height - msg['y1']) / fig.ratio;\n", - " x0 = Math.floor(x0) + 0.5;\n", - " y0 = Math.floor(y0) + 0.5;\n", - " x1 = Math.floor(x1) + 0.5;\n", - " y1 = Math.floor(y1) + 0.5;\n", - " var min_x = Math.min(x0, x1);\n", - " var min_y = Math.min(y0, y1);\n", - " var width = Math.abs(x1 - x0);\n", - " var height = Math.abs(y1 - y0);\n", - "\n", - " fig.rubberband_context.clearRect(\n", - " 0,\n", - " 0,\n", - " fig.canvas.width / fig.ratio,\n", - " fig.canvas.height / fig.ratio\n", - " );\n", - "\n", - " fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_figure_label = function (fig, msg) {\n", - " // Updates the figure title.\n", - " fig.header.textContent = msg['label'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_cursor = function (fig, msg) {\n", - " var cursor = msg['cursor'];\n", - " switch (cursor) {\n", - " case 0:\n", - " cursor = 'pointer';\n", - " break;\n", - " case 1:\n", - " cursor = 'default';\n", - " break;\n", - " case 2:\n", - " cursor = 'crosshair';\n", - " break;\n", - " case 3:\n", - " cursor = 'move';\n", - " break;\n", - " }\n", - " fig.rubberband_canvas.style.cursor = cursor;\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_message = function (fig, msg) {\n", - " fig.message.textContent = msg['message'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_draw = function (fig, _msg) {\n", - " // Request the server to send over a new figure.\n", - " fig.send_draw_message();\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_image_mode = function (fig, msg) {\n", - " fig.image_mode = msg['mode'];\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_history_buttons = function (fig, msg) {\n", - " for (var key in msg) {\n", - " if (!(key in fig.buttons)) {\n", - " continue;\n", - " }\n", - " fig.buttons[key].disabled = !msg[key];\n", - " fig.buttons[key].setAttribute('aria-disabled', !msg[key]);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_navigate_mode = function (fig, msg) {\n", - " if (msg['mode'] === 'PAN') {\n", - " 
fig.buttons['Pan'].classList.add('active');\n", - " fig.buttons['Zoom'].classList.remove('active');\n", - " } else if (msg['mode'] === 'ZOOM') {\n", - " fig.buttons['Pan'].classList.remove('active');\n", - " fig.buttons['Zoom'].classList.add('active');\n", - " } else {\n", - " fig.buttons['Pan'].classList.remove('active');\n", - " fig.buttons['Zoom'].classList.remove('active');\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.updated_canvas_event = function () {\n", - " // Called whenever the canvas gets updated.\n", - " this.send_message('ack', {});\n", - "};\n", - "\n", - "// A function to construct a web socket function for onmessage handling.\n", - "// Called in the figure constructor.\n", - "mpl.figure.prototype._make_on_message_function = function (fig) {\n", - " return function socket_on_message(evt) {\n", - " if (evt.data instanceof Blob) {\n", - " /* FIXME: We get \"Resource interpreted as Image but\n", - " * transferred with MIME type text/plain:\" errors on\n", - " * Chrome. But how to set the MIME type? It doesn't seem\n", - " * to be part of the websocket stream */\n", - " evt.data.type = 'image/png';\n", - "\n", - " /* Free the memory for the previous frames */\n", - " if (fig.imageObj.src) {\n", - " (window.URL || window.webkitURL).revokeObjectURL(\n", - " fig.imageObj.src\n", - " );\n", - " }\n", - "\n", - " fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n", - " evt.data\n", - " );\n", - " fig.updated_canvas_event();\n", - " fig.waiting = false;\n", - " return;\n", - " } else if (\n", - " typeof evt.data === 'string' &&\n", - " evt.data.slice(0, 21) === 'data:image/png;base64'\n", - " ) {\n", - " fig.imageObj.src = evt.data;\n", - " fig.updated_canvas_event();\n", - " fig.waiting = false;\n", - " return;\n", - " }\n", - "\n", - " var msg = JSON.parse(evt.data);\n", - " var msg_type = msg['type'];\n", - "\n", - " // Call the \"handle_{type}\" callback, which takes\n", - " // the figure and JSON message as its only arguments.\n", - " try {\n", - " var callback = fig['handle_' + msg_type];\n", - " } catch (e) {\n", - " console.log(\n", - " \"No handler for the '\" + msg_type + \"' message type: \",\n", - " msg\n", - " );\n", - " return;\n", - " }\n", - "\n", - " if (callback) {\n", - " try {\n", - " // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n", - " callback(fig, msg);\n", - " } catch (e) {\n", - " console.log(\n", - " \"Exception inside the 'handler_\" + msg_type + \"' callback:\",\n", - " e,\n", - " e.stack,\n", - " msg\n", - " );\n", - " }\n", - " }\n", - " };\n", - "};\n", - "\n", - "// from http://stackoverflow.com/questions/1114465/getting-mouse-location-in-canvas\n", - "mpl.findpos = function (e) {\n", - " //this section is from http://www.quirksmode.org/js/events_properties.html\n", - " var targ;\n", - " if (!e) {\n", - " e = window.event;\n", - " }\n", - " if (e.target) {\n", - " targ = e.target;\n", - " } else if (e.srcElement) {\n", - " targ = e.srcElement;\n", - " }\n", - " if (targ.nodeType === 3) {\n", - " // defeat Safari bug\n", - " targ = targ.parentNode;\n", - " }\n", - "\n", - " // pageX,Y are the mouse positions relative to the document\n", - " var boundingRect = targ.getBoundingClientRect();\n", - " var x = e.pageX - (boundingRect.left + document.body.scrollLeft);\n", - " var y = e.pageY - (boundingRect.top + document.body.scrollTop);\n", - "\n", - " return { x: x, y: y };\n", - "};\n", - "\n", - "/*\n", - " * return a copy of an object with only non-object keys\n", - " * we need this to avoid circular 
references\n", - " * http://stackoverflow.com/a/24161582/3208463\n", - " */\n", - "function simpleKeys(original) {\n", - " return Object.keys(original).reduce(function (obj, key) {\n", - " if (typeof original[key] !== 'object') {\n", - " obj[key] = original[key];\n", - " }\n", - " return obj;\n", - " }, {});\n", - "}\n", - "\n", - "mpl.figure.prototype.mouse_event = function (event, name) {\n", - " var canvas_pos = mpl.findpos(event);\n", - "\n", - " if (name === 'button_press') {\n", - " this.canvas.focus();\n", - " this.canvas_div.focus();\n", - " }\n", - "\n", - " var x = canvas_pos.x * this.ratio;\n", - " var y = canvas_pos.y * this.ratio;\n", - "\n", - " this.send_message(name, {\n", - " x: x,\n", - " y: y,\n", - " button: event.button,\n", - " step: event.step,\n", - " guiEvent: simpleKeys(event),\n", - " });\n", - "\n", - " /* This prevents the web browser from automatically changing to\n", - " * the text insertion cursor when the button is pressed. We want\n", - " * to control all of the cursor setting manually through the\n", - " * 'cursor' event from matplotlib */\n", - " event.preventDefault();\n", - " return false;\n", - "};\n", - "\n", - "mpl.figure.prototype._key_event_extra = function (_event, _name) {\n", - " // Handle any extra behaviour associated with a key event\n", - "};\n", - "\n", - "mpl.figure.prototype.key_event = function (event, name) {\n", - " // Prevent repeat events\n", - " if (name === 'key_press') {\n", - " if (event.which === this._key) {\n", - " return;\n", - " } else {\n", - " this._key = event.which;\n", - " }\n", - " }\n", - " if (name === 'key_release') {\n", - " this._key = null;\n", - " }\n", - "\n", - " var value = '';\n", - " if (event.ctrlKey && event.which !== 17) {\n", - " value += 'ctrl+';\n", - " }\n", - " if (event.altKey && event.which !== 18) {\n", - " value += 'alt+';\n", - " }\n", - " if (event.shiftKey && event.which !== 16) {\n", - " value += 'shift+';\n", - " }\n", - "\n", - " value += 'k';\n", - " value += event.which.toString();\n", - "\n", - " this._key_event_extra(event, name);\n", - "\n", - " this.send_message(name, { key: value, guiEvent: simpleKeys(event) });\n", - " return false;\n", - "};\n", - "\n", - "mpl.figure.prototype.toolbar_button_onclick = function (name) {\n", - " if (name === 'download') {\n", - " this.handle_save(this, null);\n", - " } else {\n", - " this.send_message('toolbar_button', { name: name });\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.toolbar_button_onmouseover = function (tooltip) {\n", - " this.message.textContent = tooltip;\n", - "};\n", - "\n", - "///////////////// REMAINING CONTENT GENERATED BY embed_js.py /////////////////\n", - "// prettier-ignore\n", - "var _JSXTOOLS_RESIZE_OBSERVER=function(A){var t,i=new WeakMap,n=new WeakMap,a=new WeakMap,r=new WeakMap,o=new Set;function s(e){if(!(this instanceof s))throw new TypeError(\"Constructor requires 'new' operator\");i.set(this,e)}function h(){throw new TypeError(\"Function is not a constructor\")}function c(e,t,i,n){e=0 in arguments?Number(arguments[0]):0,t=1 in arguments?Number(arguments[1]):0,i=2 in arguments?Number(arguments[2]):0,n=3 in arguments?Number(arguments[3]):0,this.right=(this.x=this.left=e)+(this.width=i),this.bottom=(this.y=this.top=t)+(this.height=n),Object.freeze(this)}function d(){t=requestAnimationFrame(d);var s=new WeakMap,p=new Set;o.forEach((function(t){r.get(t).forEach((function(i){var r=t instanceof 
window.SVGElement,o=a.get(t),d=r?0:parseFloat(o.paddingTop),f=r?0:parseFloat(o.paddingRight),l=r?0:parseFloat(o.paddingBottom),u=r?0:parseFloat(o.paddingLeft),g=r?0:parseFloat(o.borderTopWidth),m=r?0:parseFloat(o.borderRightWidth),w=r?0:parseFloat(o.borderBottomWidth),b=u+f,F=d+l,v=(r?0:parseFloat(o.borderLeftWidth))+m,W=g+w,y=r?0:t.offsetHeight-W-t.clientHeight,E=r?0:t.offsetWidth-v-t.clientWidth,R=b+v,z=F+W,M=r?t.width:parseFloat(o.width)-R-E,O=r?t.height:parseFloat(o.height)-z-y;if(n.has(t)){var k=n.get(t);if(k[0]===M&&k[1]===O)return}n.set(t,[M,O]);var S=Object.create(h.prototype);S.target=t,S.contentRect=new c(u,d,M,O),s.has(i)||(s.set(i,[]),p.add(i)),s.get(i).push(S)}))})),p.forEach((function(e){i.get(e).call(e,s.get(e),e)}))}return s.prototype.observe=function(i){if(i instanceof window.Element){r.has(i)||(r.set(i,new Set),o.add(i),a.set(i,window.getComputedStyle(i)));var n=r.get(i);n.has(this)||n.add(this),cancelAnimationFrame(t),t=requestAnimationFrame(d)}},s.prototype.unobserve=function(i){if(i instanceof window.Element&&r.has(i)){var n=r.get(i);n.has(this)&&(n.delete(this),n.size||(r.delete(i),o.delete(i))),n.size||r.delete(i),o.size||cancelAnimationFrame(t)}},A.DOMRectReadOnly=c,A.ResizeObserver=s,A.ResizeObserverEntry=h,A}; // eslint-disable-line\n", - "mpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home icon-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left icon-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right icon-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Left button pans, Right button zooms\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-arrows icon-move\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-square-o icon-check-empty\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o icon-save\", \"download\"]];\n", - "\n", - "mpl.extensions = [\"eps\", \"jpeg\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\"];\n", - "\n", - "mpl.default_extension = \"png\";/* global mpl */\n", - "\n", - "var comm_websocket_adapter = function (comm) {\n", - " // Create a \"websocket\"-like object which calls the given IPython comm\n", - " // object with the appropriate methods. 
Currently this is a non binary\n", - " // socket, so there is still some room for performance tuning.\n", - " var ws = {};\n", - "\n", - " ws.close = function () {\n", - " comm.close();\n", - " };\n", - " ws.send = function (m) {\n", - " //console.log('sending', m);\n", - " comm.send(m);\n", - " };\n", - " // Register the callback with on_msg.\n", - " comm.on_msg(function (msg) {\n", - " //console.log('receiving', msg['content']['data'], msg);\n", - " // Pass the mpl event to the overridden (by mpl) onmessage function.\n", - " ws.onmessage(msg['content']['data']);\n", - " });\n", - " return ws;\n", - "};\n", - "\n", - "mpl.mpl_figure_comm = function (comm, msg) {\n", - " // This is the function which gets called when the mpl process\n", - " // starts-up an IPython Comm through the \"matplotlib\" channel.\n", - "\n", - " var id = msg.content.data.id;\n", - " // Get hold of the div created by the display call when the Comm\n", - " // socket was opened in Python.\n", - " var element = document.getElementById(id);\n", - " var ws_proxy = comm_websocket_adapter(comm);\n", - "\n", - " function ondownload(figure, _format) {\n", - " window.open(figure.canvas.toDataURL());\n", - " }\n", - "\n", - " var fig = new mpl.figure(id, ws_proxy, ondownload, element);\n", - "\n", - " // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n", - " // web socket which is closed, not our websocket->open comm proxy.\n", - " ws_proxy.onopen();\n", - "\n", - " fig.parent_element = element;\n", - " fig.cell_info = mpl.find_output_cell(\"
\");\n", - " if (!fig.cell_info) {\n", - " console.error('Failed to find cell for figure', id, fig);\n", - " return;\n", - " }\n", - " fig.cell_info[0].output_area.element.on(\n", - " 'cleared',\n", - " { fig: fig },\n", - " fig._remove_fig_handler\n", - " );\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_close = function (fig, msg) {\n", - " var width = fig.canvas.width / fig.ratio;\n", - " fig.cell_info[0].output_area.element.off(\n", - " 'cleared',\n", - " fig._remove_fig_handler\n", - " );\n", - " fig.resizeObserverInstance.unobserve(fig.canvas_div);\n", - "\n", - " // Update the output cell to use the data from the current canvas.\n", - " fig.push_to_output();\n", - " var dataURL = fig.canvas.toDataURL();\n", - " // Re-enable the keyboard manager in IPython - without this line, in FF,\n", - " // the notebook keyboard shortcuts fail.\n", - " IPython.keyboard_manager.enable();\n", - " fig.parent_element.innerHTML =\n", - " '';\n", - " fig.close_ws(fig, msg);\n", - "};\n", - "\n", - "mpl.figure.prototype.close_ws = function (fig, msg) {\n", - " fig.send_message('closing', msg);\n", - " // fig.ws.close()\n", - "};\n", - "\n", - "mpl.figure.prototype.push_to_output = function (_remove_interactive) {\n", - " // Turn the data on the canvas into data in the output cell.\n", - " var width = this.canvas.width / this.ratio;\n", - " var dataURL = this.canvas.toDataURL();\n", - " this.cell_info[1]['text/html'] =\n", - " '';\n", - "};\n", - "\n", - "mpl.figure.prototype.updated_canvas_event = function () {\n", - " // Tell IPython that the notebook contents must change.\n", - " IPython.notebook.set_dirty(true);\n", - " this.send_message('ack', {});\n", - " var fig = this;\n", - " // Wait a second, then push the new image to the DOM so\n", - " // that it is saved nicely (might be nice to debounce this).\n", - " setTimeout(function () {\n", - " fig.push_to_output();\n", - " }, 1000);\n", - "};\n", - "\n", - "mpl.figure.prototype._init_toolbar = function () {\n", - " var fig = this;\n", - "\n", - " var toolbar = document.createElement('div');\n", - " toolbar.classList = 'btn-toolbar';\n", - " this.root.appendChild(toolbar);\n", - "\n", - " function on_click_closure(name) {\n", - " return function (_event) {\n", - " return fig.toolbar_button_onclick(name);\n", - " };\n", - " }\n", - "\n", - " function on_mouseover_closure(tooltip) {\n", - " return function (event) {\n", - " if (!event.currentTarget.disabled) {\n", - " return fig.toolbar_button_onmouseover(tooltip);\n", - " }\n", - " };\n", - " }\n", - "\n", - " fig.buttons = {};\n", - " var buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'btn-group';\n", - " var button;\n", - " for (var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " /* Instead of a spacer, we start a new button group. 
*/\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - " buttonGroup = document.createElement('div');\n", - " buttonGroup.classList = 'btn-group';\n", - " continue;\n", - " }\n", - "\n", - " button = fig.buttons[name] = document.createElement('button');\n", - " button.classList = 'btn btn-default';\n", - " button.href = '#';\n", - " button.title = name;\n", - " button.innerHTML = '';\n", - " button.addEventListener('click', on_click_closure(method_name));\n", - " button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n", - " buttonGroup.appendChild(button);\n", - " }\n", - "\n", - " if (buttonGroup.hasChildNodes()) {\n", - " toolbar.appendChild(buttonGroup);\n", - " }\n", - "\n", - " // Add the status bar.\n", - " var status_bar = document.createElement('span');\n", - " status_bar.classList = 'mpl-message pull-right';\n", - " toolbar.appendChild(status_bar);\n", - " this.message = status_bar;\n", - "\n", - " // Add the close button to the window.\n", - " var buttongrp = document.createElement('div');\n", - " buttongrp.classList = 'btn-group inline pull-right';\n", - " button = document.createElement('button');\n", - " button.classList = 'btn btn-mini btn-primary';\n", - " button.href = '#';\n", - " button.title = 'Stop Interaction';\n", - " button.innerHTML = '';\n", - " button.addEventListener('click', function (_evt) {\n", - " fig.handle_close(fig, {});\n", - " });\n", - " button.addEventListener(\n", - " 'mouseover',\n", - " on_mouseover_closure('Stop Interaction')\n", - " );\n", - " buttongrp.appendChild(button);\n", - " var titlebar = this.root.querySelector('.ui-dialog-titlebar');\n", - " titlebar.insertBefore(buttongrp, titlebar.firstChild);\n", - "};\n", - "\n", - "mpl.figure.prototype._remove_fig_handler = function (event) {\n", - " var fig = event.data.fig;\n", - " if (event.target !== this) {\n", - " // Ignore bubbled events from children.\n", - " return;\n", - " }\n", - " fig.close_ws(fig, {});\n", - "};\n", - "\n", - "mpl.figure.prototype._root_extra_style = function (el) {\n", - " el.style.boxSizing = 'content-box'; // override notebook setting of border-box.\n", - "};\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function (el) {\n", - " // this is important to make the div 'focusable\n", - " el.setAttribute('tabindex', 0);\n", - " // reach out to IPython and tell the keyboard manager to turn it's self\n", - " // off when our div gets focus\n", - "\n", - " // location in version 3\n", - " if (IPython.notebook.keyboard_manager) {\n", - " IPython.notebook.keyboard_manager.register_events(el);\n", - " } else {\n", - " // location in version 2\n", - " IPython.keyboard_manager.register_events(el);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype._key_event_extra = function (event, _name) {\n", - " var manager = IPython.notebook.keyboard_manager;\n", - " if (!manager) {\n", - " manager = IPython.keyboard_manager;\n", - " }\n", - "\n", - " // Check for shift+enter\n", - " if (event.shiftKey && event.which === 13) {\n", - " this.canvas_div.blur();\n", - " // select the cell after this one\n", - " var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n", - " IPython.notebook.select(index + 1);\n", - " }\n", - "};\n", - "\n", - "mpl.figure.prototype.handle_save = function (fig, _msg) {\n", - " fig.ondownload(fig, null);\n", - "};\n", - "\n", - "mpl.find_output_cell = function (html_output) {\n", - " // Return the cell and output element which can be found *uniquely* in the notebook.\n", - " // 
Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n", - " // IPython event is triggered only after the cells have been serialised, which for\n", - " // our purposes (turning an active figure into a static one), is too late.\n", - " var cells = IPython.notebook.get_cells();\n", - " var ncells = cells.length;\n", - " for (var i = 0; i < ncells; i++) {\n", - " var cell = cells[i];\n", - " if (cell.cell_type === 'code') {\n", - " for (var j = 0; j < cell.output_area.outputs.length; j++) {\n", - " var data = cell.output_area.outputs[j];\n", - " if (data.data) {\n", - " // IPython >= 3 moved mimebundle to data attribute of output\n", - " data = data.data;\n", - " }\n", - " if (data['text/html'] === html_output) {\n", - " return [cell, data, j];\n", - " }\n", - " }\n", - " }\n", - " }\n", - "};\n", - "\n", - "// Register the function which deals with the matplotlib target/channel.\n", - "// The kernel may be null if the page has been refreshed.\n", - "if (IPython.notebook.kernel !== null) {\n", - " IPython.notebook.kernel.comm_manager.register_target(\n", - " 'matplotlib',\n", - " mpl.mpl_figure_comm\n", - " );\n", - "}\n" - ], + "application/javascript": "/* Put everything inside the global mpl namespace */\n/* global mpl */\nwindow.mpl = {};\n\nmpl.get_websocket_type = function () {\n if (typeof WebSocket !== 'undefined') {\n return WebSocket;\n } else if (typeof MozWebSocket !== 'undefined') {\n return MozWebSocket;\n } else {\n alert(\n 'Your browser does not have WebSocket support. ' +\n 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n 'Firefox 4 and 5 are also supported but you ' +\n 'have to enable WebSockets in about:config.'\n );\n }\n};\n\nmpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n this.id = figure_id;\n\n this.ws = websocket;\n\n this.supports_binary = this.ws.binaryType !== undefined;\n\n if (!this.supports_binary) {\n var warnings = document.getElementById('mpl-warnings');\n if (warnings) {\n warnings.style.display = 'block';\n warnings.textContent =\n 'This browser does not support binary websocket messages. 
' +\n 'Performance may be slow.';\n }\n }\n\n this.imageObj = new Image();\n\n this.context = undefined;\n this.message = undefined;\n this.canvas = undefined;\n this.rubberband_canvas = undefined;\n this.rubberband_context = undefined;\n this.format_dropdown = undefined;\n\n this.image_mode = 'full';\n\n this.root = document.createElement('div');\n this.root.setAttribute('style', 'display: inline-block');\n this._root_extra_style(this.root);\n\n parent_element.appendChild(this.root);\n\n this._init_header(this);\n this._init_canvas(this);\n this._init_toolbar(this);\n\n var fig = this;\n\n this.waiting = false;\n\n this.ws.onopen = function () {\n fig.send_message('supports_binary', { value: fig.supports_binary });\n fig.send_message('send_image_mode', {});\n if (fig.ratio !== 1) {\n fig.send_message('set_dpi_ratio', { dpi_ratio: fig.ratio });\n }\n fig.send_message('refresh', {});\n };\n\n this.imageObj.onload = function () {\n if (fig.image_mode === 'full') {\n // Full images could contain transparency (where diff images\n // almost always do), so we need to clear the canvas so that\n // there is no ghosting.\n fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n }\n fig.context.drawImage(fig.imageObj, 0, 0);\n };\n\n this.imageObj.onunload = function () {\n fig.ws.close();\n };\n\n this.ws.onmessage = this._make_on_message_function(this);\n\n this.ondownload = ondownload;\n};\n\nmpl.figure.prototype._init_header = function () {\n var titlebar = document.createElement('div');\n titlebar.classList =\n 'ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix';\n var titletext = document.createElement('div');\n titletext.classList = 'ui-dialog-title';\n titletext.setAttribute(\n 'style',\n 'width: 100%; text-align: center; padding: 3px;'\n );\n titlebar.appendChild(titletext);\n this.root.appendChild(titlebar);\n this.header = titletext;\n};\n\nmpl.figure.prototype._canvas_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._root_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._init_canvas = function () {\n var fig = this;\n\n var canvas_div = (this.canvas_div = document.createElement('div'));\n canvas_div.setAttribute(\n 'style',\n 'border: 1px solid #ddd;' +\n 'box-sizing: content-box;' +\n 'clear: both;' +\n 'min-height: 1px;' +\n 'min-width: 1px;' +\n 'outline: 0;' +\n 'overflow: hidden;' +\n 'position: relative;' +\n 'resize: both;'\n );\n\n function on_keyboard_event_closure(name) {\n return function (event) {\n return fig.key_event(event, name);\n };\n }\n\n canvas_div.addEventListener(\n 'keydown',\n on_keyboard_event_closure('key_press')\n );\n canvas_div.addEventListener(\n 'keyup',\n on_keyboard_event_closure('key_release')\n );\n\n this._canvas_extra_style(canvas_div);\n this.root.appendChild(canvas_div);\n\n var canvas = (this.canvas = document.createElement('canvas'));\n canvas.classList.add('mpl-canvas');\n canvas.setAttribute('style', 'box-sizing: content-box;');\n\n this.context = canvas.getContext('2d');\n\n var backingStore =\n this.context.backingStorePixelRatio ||\n this.context.webkitBackingStorePixelRatio ||\n this.context.mozBackingStorePixelRatio ||\n this.context.msBackingStorePixelRatio ||\n this.context.oBackingStorePixelRatio ||\n this.context.backingStorePixelRatio ||\n 1;\n\n this.ratio = (window.devicePixelRatio || 1) / backingStore;\n\n var rubberband_canvas = (this.rubberband_canvas = document.createElement(\n 'canvas'\n ));\n rubberband_canvas.setAttribute(\n 'style',\n 'box-sizing: content-box; 
position: absolute; left: 0; top: 0; z-index: 1;'\n );\n\n // Apply a ponyfill if ResizeObserver is not implemented by browser.\n if (this.ResizeObserver === undefined) {\n if (window.ResizeObserver !== undefined) {\n this.ResizeObserver = window.ResizeObserver;\n } else {\n var obs = _JSXTOOLS_RESIZE_OBSERVER({});\n this.ResizeObserver = obs.ResizeObserver;\n }\n }\n\n this.resizeObserverInstance = new this.ResizeObserver(function (entries) {\n var nentries = entries.length;\n for (var i = 0; i < nentries; i++) {\n var entry = entries[i];\n var width, height;\n if (entry.contentBoxSize) {\n if (entry.contentBoxSize instanceof Array) {\n // Chrome 84 implements new version of spec.\n width = entry.contentBoxSize[0].inlineSize;\n height = entry.contentBoxSize[0].blockSize;\n } else {\n // Firefox implements old version of spec.\n width = entry.contentBoxSize.inlineSize;\n height = entry.contentBoxSize.blockSize;\n }\n } else {\n // Chrome <84 implements even older version of spec.\n width = entry.contentRect.width;\n height = entry.contentRect.height;\n }\n\n // Keep the size of the canvas and rubber band canvas in sync with\n // the canvas container.\n if (entry.devicePixelContentBoxSize) {\n // Chrome 84 implements new version of spec.\n canvas.setAttribute(\n 'width',\n entry.devicePixelContentBoxSize[0].inlineSize\n );\n canvas.setAttribute(\n 'height',\n entry.devicePixelContentBoxSize[0].blockSize\n );\n } else {\n canvas.setAttribute('width', width * fig.ratio);\n canvas.setAttribute('height', height * fig.ratio);\n }\n canvas.setAttribute(\n 'style',\n 'width: ' + width + 'px; height: ' + height + 'px;'\n );\n\n rubberband_canvas.setAttribute('width', width);\n rubberband_canvas.setAttribute('height', height);\n\n // And update the size in Python. 
We ignore the initial 0/0 size\n // that occurs as the element is placed into the DOM, which should\n // otherwise not happen due to the minimum size styling.\n if (fig.ws.readyState == 1 && width != 0 && height != 0) {\n fig.request_resize(width, height);\n }\n }\n });\n this.resizeObserverInstance.observe(canvas_div);\n\n function on_mouse_event_closure(name) {\n return function (event) {\n return fig.mouse_event(event, name);\n };\n }\n\n rubberband_canvas.addEventListener(\n 'mousedown',\n on_mouse_event_closure('button_press')\n );\n rubberband_canvas.addEventListener(\n 'mouseup',\n on_mouse_event_closure('button_release')\n );\n // Throttle sequential mouse events to 1 every 20ms.\n rubberband_canvas.addEventListener(\n 'mousemove',\n on_mouse_event_closure('motion_notify')\n );\n\n rubberband_canvas.addEventListener(\n 'mouseenter',\n on_mouse_event_closure('figure_enter')\n );\n rubberband_canvas.addEventListener(\n 'mouseleave',\n on_mouse_event_closure('figure_leave')\n );\n\n canvas_div.addEventListener('wheel', function (event) {\n if (event.deltaY < 0) {\n event.step = 1;\n } else {\n event.step = -1;\n }\n on_mouse_event_closure('scroll')(event);\n });\n\n canvas_div.appendChild(canvas);\n canvas_div.appendChild(rubberband_canvas);\n\n this.rubberband_context = rubberband_canvas.getContext('2d');\n this.rubberband_context.strokeStyle = '#000000';\n\n this._resize_canvas = function (width, height, forward) {\n if (forward) {\n canvas_div.style.width = width + 'px';\n canvas_div.style.height = height + 'px';\n }\n };\n\n // Disable right mouse context menu.\n this.rubberband_canvas.addEventListener('contextmenu', function (_e) {\n event.preventDefault();\n return false;\n });\n\n function set_focus() {\n canvas.focus();\n canvas_div.focus();\n }\n\n window.setTimeout(set_focus, 100);\n};\n\nmpl.figure.prototype._init_toolbar = function () {\n var fig = this;\n\n var toolbar = document.createElement('div');\n toolbar.classList = 'mpl-toolbar';\n this.root.appendChild(toolbar);\n\n function on_click_closure(name) {\n return function (_event) {\n return fig.toolbar_button_onclick(name);\n };\n }\n\n function on_mouseover_closure(tooltip) {\n return function (event) {\n if (!event.currentTarget.disabled) {\n return fig.toolbar_button_onmouseover(tooltip);\n }\n };\n }\n\n fig.buttons = {};\n var buttonGroup = document.createElement('div');\n buttonGroup.classList = 'mpl-button-group';\n for (var toolbar_ind in mpl.toolbar_items) {\n var name = mpl.toolbar_items[toolbar_ind][0];\n var tooltip = mpl.toolbar_items[toolbar_ind][1];\n var image = mpl.toolbar_items[toolbar_ind][2];\n var method_name = mpl.toolbar_items[toolbar_ind][3];\n\n if (!name) {\n /* Instead of a spacer, we start a new button group. 
[Omitted: matplotlib's interactive-figure (nbagg) JavaScript, embedded verbatim in this notebook's stored output cell. It is upstream widget boilerplate, not pyhf code.]

diff --git a/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb b/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb
index 5e7867307b..c8517675ca 100644
--- a/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb
+++ b/docs/examples/notebooks/learn/TensorizingInterpolations.ipynb
@@ -425,7 +425,7 @@
     "\n",
     "```python\n",
     "all_deltas = ...\n",
-    "for nh, histo in enumerate(histoset):\n",
+    "for nh, _ in enumerate(histoset):\n",
     "    deltas = all_deltas[nh]\n",
     "    ...\n",
     "```\n",
@@ -435,7 +435,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 13,
+    "execution_count": null,
     "metadata": {},
     "outputs": [],
     "source": [
@@ -452,7 +452,7 @@
     "        all_histo_deltas_up = allset_all_histo_deltas_up[nset]\n",
     "        all_histo_deltas_dn = allset_all_histo_deltas_dn[nset]\n",
     "\n",
-    "        for nh, histo in enumerate(histoset):\n",
+    "        for nh, _ in enumerate(histoset):\n",
     "            alpha_deltas = []\n",
     "            for alpha in alphaset:\n",
     "                alpha_result = []\n",
@@ -740,7 +740,7 @@
     "    for nset, histoset in enumerate(histogramssets):\n",
     "        all_histos_deltas = allsets_all_histos_deltas[nset]\n",
     "        set_result = []\n",
-    "        for nh, histo in enumerate(histoset):\n",
+    "        for nh, _ in enumerate(histoset):\n",
     "            set_result.append([d + histoset[nh, 1] for d in all_histos_deltas[nh]])\n",
     "        all_results.append(set_result)\n",
     "    return all_results"
@@ -1238,7 +1238,7 @@
     "    for nset, histoset in enumerate(histogramssets):\n",
     "        all_histos_deltas = allsets_all_histos_deltas[nset]\n",
     "        set_result = []\n",
-    "        for nh, histo in enumerate(histoset):\n",
+    "        for nh, _ in enumerate(histoset):\n",
     "            set_result.append([histoset[nh, 1] * d for d in all_histos_deltas[nh]])\n",
     "        all_results.append(set_result)\n",
     "    return all_results\n",

From 123803a39a1a37fec4d968c687a9be099aa12f35 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:14:03 -0600
Subject: [PATCH 16/23] fix: Apply flake8-bugbear B007 for src

https://docs.astral.sh/ruff/rules/unused-loop-control-variable/
---
 src/pyhf/pdf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py
index 58ca18dda6..a5e6f1ea55 100644
--- a/src/pyhf/pdf.py
+++ b/src/pyhf/pdf.py
@@ -154,7 +154,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size):
     # 4. finalize nominal & modifier builders
     nominal_rates = nominal.finalize()
     finalizd_builder_data = {}
-    for k, (builder, applier) in modifier_set.items():
+    for k, (builder, applier) in modifier_set.items():  # noqa: B007
         finalizd_builder_data[k] = modifiers_builders[k].finalize()

     # 5. collect parameters from spec and from user.
@@ -183,7 +183,7 @@ def _nominal_and_modifiers_from_spec(modifier_set, config, spec, batch_size):
     # 6. use finalized modifier data to build reparametrization function for main likelihood part
     the_modifiers = {}
-    for k, (builder, applier) in modifier_set.items():
+    for k, (builder, applier) in modifier_set.items():  # noqa: B007
         the_modifiers[k] = applier(
             modifiers=[
                 x for x in config.modifiers if x[1] == k
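As context for the B007 changes above, a minimal standalone sketch follows; the names in it are illustrative, not pyhf's code. B007 flags a loop control variable that the loop body never uses. The notebook hunks fix it by renaming the unused variable to `_`, while the `pdf.py` hunks keep the `(builder, applier)` unpacking for readability and silence the rule with `# noqa: B007`.

```python
# Illustrative sketch of flake8-bugbear B007 (unused loop control
# variable); the names here are hypothetical, not taken from pyhf.
histoset = [[1.0, 2.0], [3.0, 4.0]]
all_deltas = [[0.1, 0.2], [0.3, 0.4]]

# B007 flags `histo`: the loop body only ever uses the index `nh`.
for nh, histo in enumerate(histoset):
    print(all_deltas[nh])

# The fix used in the notebook hunks: rename the unused variable
# to `_` to make the intent explicit.
for nh, _ in enumerate(histoset):
    print(all_deltas[nh])
```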
From 12de7ed2eceb54e73aa57fdae8b917ada3562d78 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:15:17 -0600
Subject: [PATCH 17/23] fix: Apply B009 (get-attr-with-constant)
---
 tests/test_optim.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_optim.py b/tests/test_optim.py
index bda1aadee1..011ed688d3 100644
--- a/tests/test_optim.py
+++ b/tests/test_optim.py
@@ -507,7 +507,7 @@ def test_init_pars_sync_fixed_values_minuit(mocker):
     # patch all we need
     from pyhf.optimize import opt_minuit

-    minuit = mocker.patch.object(getattr(opt_minuit, "iminuit"), "Minuit")
+    minuit = mocker.patch.object(opt_minuit.iminuit, "Minuit")
     minimizer = opt._get_minimizer(None, [9, 9, 9], [(0, 10)] * 3, fixed_vals=[(0, 1)])
     assert minuit.called
     assert minuit.call_args.args[1] == [1, 9, 9]

From 428e5bdd63958ee0360b06979a49c736b70a4567 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:21:45 -0600
Subject: [PATCH 18/23] fix: Apply flake8-bugbear B006 for src

https://docs.astral.sh/ruff/rules/mutable-argument-default/
---
 src/pyhf/optimize/mixins.py     | 4 +++-
 src/pyhf/optimize/opt_minuit.py | 4 +++-
 src/pyhf/optimize/opt_scipy.py  | 4 +++-
 3 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py
index 7696335e80..f832067256 100644
--- a/src/pyhf/optimize/mixins.py
+++ b/src/pyhf/optimize/mixins.py
@@ -39,7 +39,7 @@ def _internal_minimize(
         do_grad=False,
         bounds=None,
         fixed_vals=None,
-        options={},
+        options=None,
         par_names=None,
     ):
         minimizer = self._get_minimizer(
@@ -50,6 +50,8 @@ def _internal_minimize(
             do_grad=do_grad,
             par_names=par_names,
         )
+        if options is None:
+            options = {}
         result = self._minimize(
             minimizer,
             func,

diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index 20282dd847..94d1b81c9f 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -84,7 +84,7 @@ def _minimize(
         do_grad=False,
         bounds=None,
         fixed_vals=None,
-        options={},
+        options=None,
     ):
         """
         Same signature as :func:`scipy.optimize.minimize`.
@@ -103,6 +103,8 @@ def _minimize(
         Returns:
             fitresult (scipy.optimize.OptimizeResult): the fit result
         """
+        if options is None:
+            options = {}
         maxiter = options.pop("maxiter", self.maxiter)
         # do_grad value results in iminuit.Minuit.strategy of either:
         # 0: Fast. Does not check a user-provided gradient.

diff --git a/src/pyhf/optimize/opt_scipy.py b/src/pyhf/optimize/opt_scipy.py
index b0b2a2d0ba..6701c36408 100644
--- a/src/pyhf/optimize/opt_scipy.py
+++ b/src/pyhf/optimize/opt_scipy.py
@@ -51,7 +51,7 @@ def _minimize(
         do_grad=False,
         bounds=None,
         fixed_vals=None,
-        options={},
+        options=None,
     ):
         """
         Same signature as :func:`scipy.optimize.minimize`.
@@ -71,6 +71,8 @@ def _minimize(
         Returns:
             fitresult (scipy.optimize.OptimizeResult): the fit result
         """
+        if options is None:
+            options = {}
         maxiter = options.pop("maxiter", self.maxiter)
         verbose = options.pop("verbose", self.verbose)
         method = options.pop("method", "SLSQP")
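Before the remaining patches, a standalone sketch (illustrative names, not pyhf code) of the two rules just applied: B009 flags `getattr` with a constant attribute name, and B006 flags mutable argument defaults, which are evaluated once at function definition time and then shared by every call. The `options=None` sentinel pattern adopted above is the standard fix for the latter.

```python
import math

# B009: `getattr` with a constant string is plain attribute access
# with extra indirection; the two spellings are equivalent.
assert getattr(math, "pi") == math.pi  # flagged form == fixed form

# B006: a mutable default is created once, when the function is
# defined, and shared across calls that rely on it.
def minimize_bad(options={}):  # flagged by B006
    options.setdefault("maxiter", 100)
    return options

first = minimize_bad()
first["maxiter"] = 5
print(minimize_bad())  # {'maxiter': 5}: state leaked from the first call

# The sentinel pattern these patches adopt: default to None and
# build a fresh dict inside the body on every call.
def minimize_good(options=None):
    if options is None:
        options = {}
    options.setdefault("maxiter", 100)
    return options

print(minimize_good())  # {'maxiter': 100} every time
```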
From ea79e233b0e5c2dc24eab5183061ab573abe50fd Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:24:02 -0600
Subject: [PATCH 19/23] fix: Apply flake8-bugbear B006 for tests

https://docs.astral.sh/ruff/rules/mutable-argument-default/
---
 tests/test_backend_consistency.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py
index 2d595804fb..d346ffd96e 100644
--- a/tests/test_backend_consistency.py
+++ b/tests/test_backend_consistency.py
@@ -59,9 +59,7 @@ def generate_source_poisson(n_bins):
 @pytest.mark.parametrize("n_bins", bins, ids=bin_ids)
 @pytest.mark.parametrize("invert_order", [False, True], ids=["normal", "inverted"])
-def test_hypotest_qmu_tilde(
-    n_bins, invert_order, tolerance={"numpy": 1e-02, "tensors": 5e-03}
-):
+def test_hypotest_qmu_tilde(n_bins, invert_order, tolerance=None):
     """
     Check that the different backends all compute a test statistic
     that is within a specific tolerance of each other.
@@ -74,6 +72,8 @@ def test_hypotest_qmu_tilde(
     Returns:
         None
     """
+    if tolerance is None:
+        tolerance = {"numpy": 1e-02, "tensors": 5e-03}

     source = generate_source_static(n_bins)

From 83d0760492095676701d0d82d33891f21046ae6f Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:25:50 -0600
Subject: [PATCH 20/23] fix: Apply flake8-bugbear B006 for docs

https://docs.astral.sh/ruff/rules/mutable-argument-default/
---
 .../notebooks/binderexample/StatisticalAnalysis.ipynb | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
index 6aa358bb28..723368d7ac 100644
--- a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
+++ b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb
@@ -229,7 +229,9 @@
     "    fig.canvas.draw()\n",
     "\n",
     "\n",
-    "def plot(ax=None, order=[3, 2, 1, 0], **par_settings):\n",
+    "def plot(ax=None, order=None, **par_settings):\n",
+    "    if order is None:\n",
+    "        order = [3, 2, 1, 0]\n",
     "    pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())\n",
     "    for k, v in par_settings.items():\n",
     "        pars[par_name_dict[k]] = v\n",

From a29a623e62e6abe5faae3e5a949b27c0891f3460 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:27:52 -0600
Subject: [PATCH 21/23] fix B011

B011 Do not `assert False` (`python -O` removes these calls), raise
`AssertionError()`

c.f. https://docs.astral.sh/ruff/rules/assert-false/
---
 tests/test_backend_consistency.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_backend_consistency.py b/tests/test_backend_consistency.py
index d346ffd96e..b1066dcac6 100644
--- a/tests/test_backend_consistency.py
+++ b/tests/test_backend_consistency.py
@@ -134,4 +134,4 @@ def test_hypotest_qmu_tilde(n_bins, invert_order, tolerance=None):
         print(
             f"Ratio to NumPy+SciPy exceeded tolerance of {tolerance['numpy']}: {numpy_ratio_delta_unity.tolist()}"
         )
-        assert False
+        raise AssertionError() from None
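This patch (B011) and the next one (B017) both tighten assertions in the test suite. The following standalone sketch uses hypothetical stand-in functions, not pyhf's actual test code, to show what each rule protects against: `assert False` is stripped when Python runs with `-O`, and a blind `pytest.raises(Exception)` passes on any error at all, including unrelated bugs.

```python
import pytest

def check_ratio(ratio, tolerance):
    """Hypothetical stand-in for the tolerance check in the tests."""
    if abs(ratio - 1.0) > tolerance:
        # B011: `assert False` here would vanish under `python -O`,
        # silently passing the check. An explicit raise always fires.
        raise AssertionError(f"ratio {ratio} exceeds tolerance {tolerance}")

def broadcast(shapes):
    """Hypothetical stand-in that rejects incompatible shapes."""
    if len(set(shapes)) != 1:
        raise ValueError("cannot broadcast incompatible shapes")

def test_broadcast_rejects_mismatch():
    # B017: `pytest.raises(Exception)` would also pass on a TypeError
    # from a typo; naming ValueError pins down the expected failure.
    with pytest.raises(ValueError):
        broadcast([(1,), (2, 3)])

# Run the checks directly so the sketch is self-contained.
check_ratio(1.0, 0.01)
test_broadcast_rejects_mismatch()
print("ok")
```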
From cf372c6ad80cbcd2a1d89e40693105af9956b4cc Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:31:40 -0600
Subject: [PATCH 22/23] fix B017

B017 Do not assert blind exception: `Exception`

https://docs.astral.sh/ruff/rules/assert-raises-exception/
---
 tests/test_tensor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_tensor.py b/tests/test_tensor.py
index dc6029d618..91fc21408c 100644
--- a/tests/test_tensor.py
+++ b/tests/test_tensor.py
@@ -198,7 +198,7 @@ def test_broadcasting(backend):
             ),
         )
     ) == [[1, 1, 1], [2, 3, 4], [5, 6, 7]]
-    with pytest.raises(Exception):
+    with pytest.raises(ValueError):
         tb.simple_broadcast(
             tb.astensor([1]), tb.astensor([2, 3]), tb.astensor([5, 6, 7])
         )

From 5872f176faa8b3e0002b6ebfb4bcc86e410b70bc Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Thu, 23 Oct 2025 17:32:19 -0600
Subject: [PATCH 23/23] style: Enable flake8-bugbear in ruff
---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 29e2fd1641..d4cd2d69a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -256,6 +256,7 @@ extend-select = [
     "RUF",  # Ruff-specific
     "TID",  # flake8-tidy-imports
     "I",    # isort
+    "B",    # flake8-bugbear
 ]
 ignore = [
     "E402",