Merge branch 'main' of github.com:inducer/pytools into tag-propagation
a-alveyblanc committed Aug 31, 2024
2 parents fdd1272 + 6c1bd3f commit 678e0ed
Showing 13 changed files with 180 additions and 71 deletions.
5 changes: 3 additions & 2 deletions .github/workflows/ci.yml
@@ -66,7 +66,7 @@ jobs:
python-version: '3.x'
- name: "Main Script"
run: |
EXTRA_INSTALL="numpy"
EXTRA_INSTALL="numpy siphash24"
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-mypy.sh
. ./prepare-and-run-mypy.sh python3 mypy
@@ -89,7 +89,7 @@ jobs:
# AK, 2020-12-13
rm pytools/mpiwrap.py
EXTRA_INSTALL="numpy frozendict immutabledict orderedsets constantdict immutables pyrsistent attrs"
EXTRA_INSTALL="numpy frozendict immutabledict orderedsets constantdict immutables pyrsistent attrs siphash24"
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
. ./build-and-test-py-project.sh
@@ -104,6 +104,7 @@ jobs:
python-version: '3.x'
- name: "Main Script"
run: |
EXTRA_INSTALL="siphash24"
rm pytools/{convergence,spatial_btree,obj_array,mpiwrap}.py
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
. ./build-and-test-py-project.sh
9 changes: 5 additions & 4 deletions .gitlab-ci.yml
@@ -4,7 +4,7 @@ Pytest:
# AK, 2020-12-13
rm pytools/mpiwrap.py
export EXTRA_INSTALL="numpy"
export EXTRA_INSTALL="numpy siphash24"
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
. ./build-and-test-py-project.sh
tags:
@@ -17,6 +17,7 @@ Pytest:

Pytest without Numpy:
script: |
EXTRA_INSTALL="siphash24"
rm pytools/{convergence,spatial_btree,obj_array,mpiwrap}.py
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
. ./build-and-test-py-project.sh
@@ -40,7 +41,7 @@ Pytest without Numpy:
# except:
# - tags

Flake8:
Ruff:
script:
- pipx install ruff
- ruff check
@@ -60,7 +61,7 @@ Mypy:

Pylint:
script:
- EXTRA_INSTALL="numpy pymbolic orderedsets"
- EXTRA_INSTALL="numpy pymbolic orderedsets siphash24"
- py_version=3
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-pylint.sh
- . ./prepare-and-run-pylint.sh "$CI_PROJECT_NAME"
@@ -71,7 +72,7 @@ Pylint:

Documentation:
script:
- EXTRA_INSTALL="numpy"
- EXTRA_INSTALL="numpy siphash24"
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-docs.sh
- ". ./build-docs.sh"
tags:
4 changes: 2 additions & 2 deletions README.rst
@@ -4,9 +4,9 @@ Pytools: Lots of Little Utilities
.. image:: https://gitlab.tiker.net/inducer/pytools/badges/main/pipeline.svg
:alt: Gitlab Build Status
:target: https://gitlab.tiker.net/inducer/pytools/commits/main
.. image:: https://github.com/inducer/pytools/workflows/CI/badge.svg?branch=main&event=push
.. image:: https://github.com/inducer/pytools/workflows/CI/badge.svg?branch=main
:alt: Github Build Status
:target: https://github.com/inducer/pytools/actions?query=branch%3Amain+workflow%3ACI+event%3Apush
:target: https://github.com/inducer/pytools/actions?query=branch%3Amain+workflow%3ACI
.. image:: https://badge.fury.io/py/pytools.png
:alt: Python Package Index Release Page
:target: https://pypi.org/project/pytools/
10 changes: 5 additions & 5 deletions doc/conf.py
@@ -26,13 +26,13 @@
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

intersphinx_mapping = {
"loopy": ("https://documen.tician.de/loopy/", None),
"loopy": ("https://documen.tician.de/loopy", None),
"numpy": ("https://numpy.org/doc/stable", None),
"pymbolic": ("https://documen.tician.de/pymbolic/", None),
"pytest": ("https://docs.pytest.org/en/stable/", None),
"setuptools": ("https://setuptools.pypa.io/en/latest/", None),
"pymbolic": ("https://documen.tician.de/pymbolic", None),
"pytest": ("https://docs.pytest.org/en/stable", None),
"setuptools": ("https://setuptools.pypa.io/en/latest", None),
"python": ("https://docs.python.org/3", None),
"platformdirs": ("https://platformdirs.readthedocs.io/en/latest/", None),
"platformdirs": ("https://platformdirs.readthedocs.io/en/latest", None),
}

nitpicky = True
2 changes: 1 addition & 1 deletion doc/upload-docs.sh
@@ -1,3 +1,3 @@
#! /bin/sh

rsync --verbose --archive --delete _build/html/* doc-upload:doc/pytools
rsync --verbose --archive --delete _build/html/ doc-upload:doc/pytools
14 changes: 12 additions & 2 deletions pyproject.toml
@@ -6,7 +6,7 @@ requires = [

[project]
name = "pytools"
version = "2024.1.10"
version = "2024.1.13"
description = "A collection of tools for Python"
readme = "README.rst"
license = { text = "MIT" }
@@ -34,7 +34,6 @@ dependencies = [
"platformdirs>=2.2",
# for dataclass_transform with frozen_default
"typing-extensions>=4; python_version<'3.13'",
"siphash24>=1.6",
]

[project.optional-dependencies]
@@ -46,6 +45,9 @@ test = [
"pytest",
"ruff",
]
siphash = [
"siphash24>=1.6",
]

[project.urls]
Documentation = "https://documen.tician.de/pytools/"
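With siphash24 demoted from a hard dependency to the optional "siphash" extra above, code that wants the faster hash has to tolerate its absence. A minimal sketch of that pattern, assuming the siphash24 package exposes a hashlib-style siphash13 constructor (an assumption for illustration, not something this commit shows):

    # Hedged sketch: use the optional fast hash when "pytools[siphash]" is
    # installed, and fall back to hashlib otherwise. The siphash13 name is an
    # assumption about the siphash24 package, not part of this commit.
    try:
        from siphash24 import siphash13 as default_hash
    except ImportError:
        from hashlib import sha256 as default_hash

    h = default_hash()
    h.update(b"pytools")
    print(h.hexdigest())
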
@@ -108,5 +110,13 @@ known-local-folder = [
lines-after-imports = 2

[tool.mypy]
python_version = "3.8"
ignore_missing_imports = true
warn_unused_ignores = true
# TODO: enable this at some point
# check_untyped_defs = true

[tool.typos.default]
extend-ignore-re = [
"(?Rm)^.*(#|//)\\s*spellchecker:\\s*disable-line$"
]
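
The [tool.typos.default] pattern above tells the typos checker to skip any line ending in a spellchecker directive. For illustration, a hypothetical line like the following would be ignored:

    frobnicatr = 1  # intentional misspelling  # spellchecker: disable-line
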
105 changes: 81 additions & 24 deletions pytools/__init__.py
@@ -32,14 +32,15 @@
from functools import reduce, wraps
from sys import intern
from typing import (
TYPE_CHECKING,
Any,
Callable,
ClassVar,
Collection,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
List,
Mapping,
Optional,
@@ -52,15 +53,21 @@
)


try:
from typing import Concatenate, SupportsIndex
except ImportError:
from typing_extensions import Concatenate, SupportsIndex
if TYPE_CHECKING:
# NOTE: mypy seems to be confused by the `try.. except` below when called with
# python -m mypy --python-version 3.8 ...
# see https://github.com/python/mypy/issues/14220
from typing_extensions import Concatenate, ParamSpec, SupportsIndex
else:
try:
from typing import Concatenate, SupportsIndex
except ImportError:
from typing_extensions import Concatenate, SupportsIndex

try:
from typing import ParamSpec
except ImportError:
from typing_extensions import ParamSpec # type: ignore[assignment]
try:
from typing import ParamSpec
except ImportError:
from typing_extensions import ParamSpec # type: ignore[assignment]


# These are deprecated and will go away in 2022.
@@ -203,10 +210,16 @@
.. autofunction:: strtobool
.. autofunction:: to_identifier
Sequence utilities
------------------
Set-like functions for iterables
--------------------------------
These functions provide set-like operations on iterables. In contrast to
Python's built-in set type, they maintain the internal order of elements.
.. autofunction:: unique
.. autofunction:: unique_difference
.. autofunction:: unique_intersection
.. autofunction:: unique_union
Type Variables Used
-------------------
@@ -1046,11 +1059,11 @@ def monkeypatch_class(_name, bases, namespace):
# {{{ generic utilities

def add_tuples(t1, t2):
return tuple([t1v + t2v for t1v, t2v in zip(t1, t2)])
return tuple(t1v + t2v for t1v, t2v in zip(t1, t2))


def negate_tuple(t1):
return tuple([-t1v for t1v in t1])
return tuple(-t1v for t1v in t1)


def shift(vec, dist):
@@ -1601,7 +1614,7 @@ def add_row(self, row: Tuple[Any, ...]) -> None:
f"tried to add a row with {len(row)} columns to "
f"a table with {self.ncolumns} columns")

self.rows.append(tuple([str(i) for i in row]))
self.rows.append(tuple(str(i) for i in row))

def _get_alignments(self) -> Tuple[str, ...]:
# NOTE: If not all alignments were specified, extend alignments with the
@@ -1611,9 +1624,9 @@ def _get_alignments(self) -> Tuple[str, ...]:
)

def _get_column_widths(self, rows) -> Tuple[int, ...]:
return tuple([
return tuple(
max(len(row[i]) for row in rows) for i in range(self.ncolumns)
])
)

def __str__(self) -> str:
"""
@@ -1670,7 +1683,7 @@ def escape(cell: str) -> str:
# Pipe symbols ('|') must be replaced
return cell.replace("|", "\\|")

rows = [tuple([escape(cell) for cell in row]) for row in self.rows]
rows = [tuple(escape(cell) for cell in row) for row in self.rows]
alignments = self._get_alignments()
col_widths = self._get_column_widths(rows)

@@ -1780,9 +1793,9 @@ def remove_columns(i, row):
if i == 0 or skip_columns is None:
return row
else:
return tuple([
return tuple(
entry for i, entry in enumerate(row) if i not in skip_columns
])
)

alignments = sum((
remove_columns(i, tbl._get_alignments())
@@ -2252,7 +2265,7 @@ class UniqueNameGenerator:
.. automethod:: __call__
"""
def __init__(self,
existing_names: Optional[Set[str]] = None,
existing_names: Optional[Collection[str]] = None,
forced_prefix: str = ""):
"""
Create a new :class:`UniqueNameGenerator`.
@@ -2264,7 +2277,7 @@ def __init__(self,
if existing_names is None:
existing_names = set()

self.existing_names = existing_names.copy()
self.existing_names = set(existing_names)
self.forced_prefix = forced_prefix
self.prefix_to_counter: Dict[str, int] = {}
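
With the widened signature above, UniqueNameGenerator accepts any Collection of existing names (a list, frozenset, dict keys, ...) and now copies it into a set itself. A brief usage sketch; the exact "tmp_1" result is an expectation based on the generator's usual counter scheme, not something asserted by this diff:

    from pytools import UniqueNameGenerator

    # A plain list is now fine; previously a set was expected.
    name_gen = UniqueNameGenerator(existing_names=["tmp", "tmp_0"])
    print(name_gen("tmp"))  # yields a fresh name, expected to be "tmp_1"
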

@@ -2983,11 +2996,55 @@ def to_identifier(s: str) -> str:

# {{{ unique

def unique(seq: Iterable[T]) -> Iterator[T]:
"""Yield unique elements in *seq*, removing all duplicates. The internal
def unique(seq: Iterable[T]) -> Collection[T]:
"""Return unique elements in *seq*, removing all duplicates. The internal
order of the elements is preserved. See also
:func:`itertools.groupby` (which removes consecutive duplicates)."""
return iter(dict.fromkeys(seq))
return dict.fromkeys(seq)


def unique_difference(*args: Iterable[T]) -> Collection[T]:
r"""Return unique elements that are in the first iterable in *\*args* but not
in any of the others. The internal order of the elements is preserved."""
if not args:
return []

res = dict.fromkeys(args[0])
for seq in args[1:]:
for item in seq:
if item in res:
del res[item]

return res


def unique_intersection(*args: Iterable[T]) -> Collection[T]:
r"""Return unique elements that are common to all iterables in *\*args*.
The internal order of the elements is preserved."""
if not args:
return []

res = dict.fromkeys(args[0])
for seq in args[1:]:
seq = set(seq)
res = {item: None for item in res if item in seq}

return res


def unique_union(*args: Iterable[T]) -> Collection[T]:
r"""Return unique elements that are in any iterable in *\*args*.
The internal order of the elements is preserved."""
if not args:
return []

res: Dict[T, None] = {}
for seq in args:
for item in seq:
if item not in res:
res[item] = None

return res

# }}}

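The four helpers above return order-preserving collections (backed by dicts) rather than bare iterators. A short usage sketch based directly on the implementations shown:

    from pytools import unique, unique_difference, unique_intersection, unique_union

    a = [3, 1, 2, 1, 3]
    b = [2, 4, 3]

    print(list(unique(a)))                  # [3, 1, 2]  (first-seen order kept)
    print(list(unique_difference(a, b)))    # [1]
    print(list(unique_intersection(a, b)))  # [3, 2]
    print(list(unique_union(a, b)))         # [3, 1, 2, 4]
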
9 changes: 5 additions & 4 deletions pytools/convergence.py
@@ -67,22 +67,23 @@ def estimate_order_of_convergence(self,

# NOTE: in case any of the errors are exactly 0.0, which
# can give NaNs in `estimate_order_of_convergence`
emax = np.amax(errors)
emax: float = np.amax(errors)
errors += (1 if emax == 0 else emax) * np.finfo(errors.dtype).eps

size = len(abscissae)
if gliding_mean is None:
gliding_mean = size

data_points = size - gliding_mean + 1
result = np.zeros((data_points, 2), float)
result: np.ndarray = np.zeros((data_points, 2), float)
for i in range(data_points):
result[i, 0], result[i, 1] = estimate_order_of_convergence(
abscissae[i:i+gliding_mean], errors[i:i+gliding_mean])
return result

def order_estimate(self) -> float:
return self.estimate_order_of_convergence()[0, 1]
from typing import cast
return cast(float, self.estimate_order_of_convergence()[0, 1])

def max_error(self) -> float:
return max(err for absc, err in self.history)
@@ -176,7 +177,7 @@ def stringify_eocs(*eocs: EOCRecorder,
f"{len(eocs)} EOCRecorder instances")

if names is None:
names = tuple([f"{error_label} {i}" for i in range(len(eocs))])
names = tuple(f"{error_label} {i}" for i in range(len(eocs)))

from pytools import merge_tables
tbl = merge_tables(*[eoc._to_table(
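For context on the pytools.convergence changes, a hedged usage sketch of EOCRecorder; add_data_point is assumed from the existing pytools API and is not part of this diff:

    from pytools.convergence import EOCRecorder

    eoc = EOCRecorder()
    for n in [8, 16, 32, 64]:
        h = 1.0 / n
        # feed a synthetic second-order error so the estimated order comes out near 2
        eoc.add_data_point(h, h**2)

    print(eoc.order_estimate())  # expected to be approximately 2.0
    print(eoc.max_error())       # largest recorded error, here (1/8)**2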
