Commit

Merge branch 'main' into pdict-threads

matthiasdiener authored Jul 1, 2024
2 parents 9546e0b + 72d26b5 commit 7e14c94
Showing 22 changed files with 358 additions and 212 deletions.
14 changes: 5 additions & 9 deletions .github/workflows/ci.yml
@@ -10,20 +10,16 @@ on:
- cron: '17 3 * * 0'

jobs:
-    flake8:
-        name: Flake8
+    ruff:
+        name: Ruff
         runs-on: ubuntu-latest
         steps:
         -   uses: actions/checkout@v4
-        -
-            uses: actions/setup-python@v5
-            with:
-                # matches compat target in setup.py
-                python-version: '3.8'
+        -   uses: actions/setup-python@v5
         -   name: "Main Script"
             run: |
-                curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-flake8.sh
-                . ./prepare-and-run-flake8.sh "$(basename $GITHUB_REPOSITORY)"
+                pip install ruff
+                ruff check
validate_cff:
name: Validate CITATION.cff
6 changes: 3 additions & 3 deletions .gitlab-ci.yml
@@ -42,10 +42,10 @@ Pytest without Numpy:

Flake8:
script:
-  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-flake8.sh
-  - . ./prepare-and-run-flake8.sh "$CI_PROJECT_NAME"
+  - pipx install ruff
+  - ruff check
tags:
- python3
- docker-runner
except:
- tags

4 changes: 4 additions & 0 deletions doc/convergence.rst
@@ -0,0 +1,4 @@
Testing convergence
-------------------

.. automodule:: pytools.convergence
1 change: 1 addition & 0 deletions doc/index.rst
@@ -8,6 +8,7 @@ Welcome to pytools's documentation!
reference
obj_array
persistent_dict
convergence
graph
tag
codegen
97 changes: 97 additions & 0 deletions pyproject.toml
@@ -0,0 +1,97 @@
[build-system]
build-backend = "setuptools.build_meta"
requires = [
"setuptools>=63",
]

[project]
name = "pytools"
version = "2024.1.6"
description = "A collection of tools for Python"
readme = "README.rst"
license = { text = "MIT" }
requires-python = "~=3.8"
authors = [
{ name = "Andreas Kloeckner", email = "inform@tiker.net" },
]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Other Audience",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
]
dependencies = [
"platformdirs>=2.2.0",
"typing_extensions>=4.0; python_version<'3.11'",
]

[project.optional-dependencies]
numpy = [
"numpy>=1.6.0",
]

test = [
"mypy",
"pytest",
"ruff",
]

[project.urls]
Homepage = "https://github.com/inducer/pytools/"
Documentation = "https://documen.tician.de/pytools/"

[tool.setuptools.package-data]
pytools = [
"py.typed",
]

[tool.ruff]
target-version = "py38"
line-length = 85

preview = true

[tool.ruff.lint]
extend-select = [
"B", # flake8-bugbear
"C", # flake8-comprehensions
"E", # pycodestyle
"F", # pyflakes
"I", # flake8-isort
"N", # pep8-naming
"NPY", # numpy
"Q", # flake8-quotes
"W", # pycodestyle
]
extend-ignore = [
"C90", # McCabe complexity
"E221", # multiple spaces before operator
"E226", # missing whitespace around arithmetic operator
"E402", # module-level import not at top of file
]

[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"
inline-quotes = "double"
multiline-quotes = "double"

[tool.ruff.lint.isort]
combine-as-imports = true

known-local-folder = [
"pytools",
]
lines-after-imports = 2

[tool.mypy]
ignore_missing_imports = true
warn_unused_ignores = true

71 changes: 47 additions & 24 deletions pytools/__init__.py
@@ -36,8 +36,24 @@
from functools import reduce, wraps
from sys import intern
from typing import (
-    Any, Callable, ClassVar, Dict, Generic, Hashable, Iterable, Iterator, List,
-    Mapping, Optional, Sequence, Set, Tuple, Type, TypeVar, Union)
+    Any,
+    Callable,
+    ClassVar,
+    Dict,
+    Generic,
+    Hashable,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)


try:
@@ -414,7 +430,10 @@ class RecordWithoutPickling:
__slots__: ClassVar[List[str]] = []
fields: ClassVar[Set[str]]

- def __init__(self, valuedict=None, exclude=None, **kwargs):
+ def __init__(self,
+              valuedict: Optional[Mapping[str, Any]] = None,
+              exclude: Optional[Sequence[str]] = None,
+              **kwargs: Any) -> None:
assert self.__class__ is not Record

if exclude is None:
@@ -525,7 +544,8 @@ def __init__(self, value):

def get(self):
from warnings import warn
warn("Reference.get() is deprecated -- use ref.value instead")
warn("Reference.get() is deprecated -- use ref.value instead. "
"This will stop working in 2025.", stacklevel=2)
return self.value

def set(self, value):
@@ -604,7 +624,7 @@ def one(iterable: Iterable[T]) -> T:
try:
v = next(it)
except StopIteration:
raise ValueError("empty iterable passed to 'one()'")
raise ValueError("empty iterable passed to 'one()'") from None

def no_more():
try:
@@ -626,7 +646,7 @@ def is_single_valued(
try:
first_item = next(it)
except StopIteration:
raise ValueError("empty iterable passed to 'single_valued()'")
raise ValueError("empty iterable passed to 'single_valued()'") from None

for other_item in it:
if not equality_pred(other_item, first_item):
@@ -653,7 +673,7 @@ def single_valued(
try:
first_item = next(it)
except StopIteration:
raise ValueError("empty iterable passed to 'single_valued()'")
raise ValueError("empty iterable passed to 'single_valued()'") from None

def others_same():
for other_item in it:
@@ -940,10 +960,7 @@ def new_inner(*args: P.args, **kwargs: P.kwargs) -> R:
self.cache_dict[args] = result
return result

- # NOTE: mypy gets confused because it types `wraps` as
- #     Callable[[VarArg(Any)], Any]
- # which, for some reason, is not compatible with `F`
- return new_inner  # type: ignore[return-value]
+ return new_inner


class keyed_memoize_in(Generic[P, R]): # noqa
@@ -1241,7 +1258,7 @@ def argmin2(iterable, return_value=False):
try:
current_argmin, current_min = next(it)
except StopIteration:
raise ValueError("argmin of empty iterable")
raise ValueError("argmin of empty iterable") from None

for arg, item in it:
if item < current_min:
@@ -1259,7 +1276,7 @@ def argmax2(iterable, return_value=False):
try:
current_argmax, current_max = next(it)
except StopIteration:
raise ValueError("argmax of empty iterable")
raise ValueError("argmax of empty iterable") from None

for arg, item in it:
if item > current_max:
@@ -1326,7 +1343,7 @@ def average(iterable):
s = next(it)
count = 1
except StopIteration:
raise ValueError("empty average")
raise ValueError("empty average") from None

for value in it:
s = s + value
@@ -1441,7 +1458,7 @@ def generate_decreasing_nonnegative_tuples_summing_to(
yield ()
elif length == 1:
if n <= max_value:
#print "MX", n, max_value
# print "MX", n, max_value
yield (n,)
else:
return
@@ -1450,7 +1467,7 @@ def generate_decreasing_nonnegative_tuples_summing_to(
max_value = n

for i in range(min_value, max_value+1):
#print "SIG", sig, i
# print "SIG", sig, i
for remainder in generate_decreasing_nonnegative_tuples_summing_to(
n-i, length-1, min_value, i):
yield (i,) + remainder
@@ -1502,7 +1519,7 @@ def generate_permutations(original):
else:
for perm_ in generate_permutations(original[1:]):
for i in range(len(perm_)+1):
- #nb str[0:1] works in both string and list contexts
+ # nb str[0:1] works in both string and list contexts
yield perm_[:i] + original[0:1] + perm_[i:]


@@ -1527,7 +1544,7 @@ def enumerate_basic_directions(dimensions):

# {{{ graph algorithms

- from pytools.graph import a_star as a_star_moved
+ from pytools.graph import a_star as a_star_moved  # noqa: E402


a_star = MovedFunctionDeprecationWrapper(a_star_moved)
@@ -1808,7 +1825,7 @@ def string_histogram( # pylint: disable=too-many-arguments,too-many-locals
for value in iterable:
if max_value is not None and value > max_value or value < bin_starts[0]:
from warnings import warn
warn("string_histogram: out-of-bounds value ignored")
warn("string_histogram: out-of-bounds value ignored", stacklevel=2)
else:
bin_nr = bisect(bin_starts, value)-1
try:
@@ -2425,7 +2442,7 @@ def find_git_revision(tree_root): # pylint: disable=too-many-locals
assert retcode is not None
if retcode != 0:
from warnings import warn
warn("unable to find git revision")
warn("unable to find git revision", stacklevel=1)
return None

return git_rev
@@ -2704,7 +2721,8 @@ def natsorted(iterable, key=None, reverse=False):
.. versionadded:: 2020.1
"""
if key is None:
- key = lambda x: x
+ def key(x):
+     return x
return sorted(iterable, key=lambda y: natorder(key(y)), reverse=reverse)

# }}}
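Aside on the hunk above: natsorted orders runs of digits by numeric value rather than character by character. A minimal usage sketch (the example strings are made up for illustration, not taken from the repository):

    from pytools import natsorted

    # Plain sorted() would give ['step1', 'step10', 'step2'].
    print(natsorted(["step10", "step2", "step1"]))  # ['step1', 'step2', 'step10']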
@@ -2768,7 +2786,9 @@ def resolve_name(name):

# {{{ unordered_hash

- def unordered_hash(hash_instance, iterable, hash_constructor=None):
+ def unordered_hash(hash_instance: Any,
+                    iterable: Iterable[Any],
+                    hash_constructor: Optional[Callable[[], Any]] = None) -> Any:
"""Using a hash algorithm given by the parameter-less constructor
*hash_constructor*, return a hash object whose internal state
depends on the entries of *iterable*, but not their order. If *hash*
@@ -2794,6 +2814,8 @@ def unordered_hash(hash_instance, iterable, hash_constructor=None):
from functools import partial
hash_constructor = partial(hashlib.new, hash_instance.name)

assert hash_constructor is not None

h_int = 0
for i in iterable:
h_i = hash_constructor()
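A hedged usage sketch of unordered_hash as described in its docstring (the concrete byte strings below are made up for illustration): hashing the same entries in a different order yields the same digest.

    import hashlib
    from pytools import unordered_hash

    h1 = unordered_hash(hashlib.sha256(), [b"a", b"b", b"c"])
    h2 = unordered_hash(hashlib.sha256(), [b"c", b"a", b"b"])
    assert h1.hexdigest() == h2.hexdigest()  # entry order does not affect the result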
@@ -2967,8 +2989,9 @@ def to_identifier(s: str) -> str:

# {{{ unique

- def unique(seq: Sequence[T]) -> Iterator[T]:
-     """Yield unique elements in *seq*, removing all duplicates. See also
+ def unique(seq: Iterable[T]) -> Iterator[T]:
+     """Yield unique elements in *seq*, removing all duplicates. The internal
+     order of the elements is preserved. See also
:func:`itertools.groupby` (which removes consecutive duplicates)."""
return iter(dict.fromkeys(seq))
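Because unique() is implemented via dict.fromkeys (shown above), it keeps the first occurrence of each element and preserves encounter order. A small sketch (illustrative input only):

    from pytools import unique

    print(list(unique([3, 1, 3, 2, 1])))  # [3, 1, 2]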

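Several hunks in pytools/__init__.py above append "from None" to exceptions re-raised inside an "except StopIteration:" block. A minimal sketch of the effect, using a hypothetical helper that is not part of the repository: without "from None", the traceback also prints the original StopIteration under "During handling of the above exception, another exception occurred"; with it, only the ValueError is reported.

    def first(iterable):
        try:
            return next(iter(iterable))
        except StopIteration:
            # PEP 409: suppress implicit exception chaining in the traceback
            raise ValueError("empty iterable passed to 'first()'") from None

    first([])  # raises ValueError only; the StopIteration is not shown as its cause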
14 changes: 12 additions & 2 deletions pytools/convergence.py
@@ -1,3 +1,11 @@
"""
.. autofunction:: estimate_order_of_convergence
.. autoclass:: EOCRecorder
.. autofunction:: stringify_eocs
.. autoclass:: PConvergenceVerifier
"""


import numbers
from typing import List, Optional, Tuple

@@ -7,9 +15,11 @@
# {{{ eoc estimation --------------------------------------------------------------

def estimate_order_of_convergence(abscissae, errors):
"""Assuming that abscissae and errors are connected by a law of the form
r"""Assuming that abscissae and errors are connected by a law of the form
.. math::
error = constant * abscissa ^ (order),
\text{Error} = \text{constant} \cdot \text{abscissa }^{\text{order}},
this function finds, in a least-squares sense, the best approximation of
constant and order for the given data set. It returns a tuple (constant, order).
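The updated docstring states the model Error = constant * abscissa^order. One way to recover both parameters, as the docstring describes, is a least-squares fit in log-log space. The sketch below is only an illustration on synthetic second-order data, not the implementation in pytools.convergence (which provides estimate_order_of_convergence and EOCRecorder for this purpose).

    import numpy as np

    h = np.array([0.1, 0.05, 0.025, 0.0125])   # abscissae
    err = 3.0 * h**2                            # synthetic errors: constant=3, order=2

    # log(err) = log(constant) + order * log(h), so fit a line in log-log space.
    order, log_constant = np.polyfit(np.log(h), np.log(err), 1)
    print(order, np.exp(log_constant))          # approximately 2.0 and 3.0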
3 changes: 1 addition & 2 deletions pytools/datatable.py
@@ -11,8 +11,7 @@
"""


- # type-ignore-reason: Record is untyped
- class Row(Record):  # type: ignore[misc]
+ class Row(Record):
pass


(Diffs for the remaining 14 changed files are not shown.)
