
Commit 3b8dbe6

Enabling 3.12, using pytest-benchmark (#61)
1 parent ac5afdd commit 3b8dbe6

File tree

.github/workflows/test.yml
.pre-commit-config.yaml
setup.cfg
src/measured/_parser.py
tests/performance_harness.py
tests/test_performance.py

6 files changed: +61, -122 lines

.github/workflows/test.yml

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version:
+          - "3.12"
           - "3.11"
           - "3.10"
           - "3.9"

.pre-commit-config.yaml

Lines changed: 4 additions & 4 deletions
@@ -7,19 +7,19 @@ repos:
       - id: check-toml
       - id: check-yaml
   - repo: https://github.com/PyCQA/isort
-    rev: "5.12.0"
+    rev: "5.13.2"
     hooks:
       - id: isort
   - repo: https://github.com/psf/black
-    rev: "22.6.0"
+    rev: "23.12.1"
     hooks:
       - id: black-jupyter
   - repo: https://github.com/PyCQA/flake8
-    rev: "4.0.1"
+    rev: "7.0.0"
     hooks:
       - id: flake8
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.0.0"
+    rev: "v1.8.0"
     hooks:
       - id: mypy
         additional_dependencies:

setup.cfg

Lines changed: 1 addition & 1 deletion
@@ -77,6 +77,7 @@ dev =
     pydantic>2
     pytest
     pytest-asyncio
+    pytest-benchmark
     pytest-cov
     pytest-xdist
     snakeviz
@@ -92,7 +93,6 @@ dev =
 omit =
     src/measured/_parser.py
     src/measured/pytest.py
-    tests/performance_harness.py

 [isort]
 profile = black
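
Not part of this commit, but for context on the new dev dependency: pytest-benchmark provides a benchmark fixture that times a callable over many rounds while still returning its result, so benchmark tests can also assert correctness. A minimal sketch, assuming pytest and pytest-benchmark are installed (the test name and workload below are illustrative only, not from this repository):

from pytest_benchmark.fixture import BenchmarkFixture


def test_sum_benchmark(benchmark: BenchmarkFixture) -> None:
    # benchmark(...) invokes the callable repeatedly, records timing statistics,
    # and returns the callable's return value so it can still be asserted on.
    result = benchmark(sum, range(1_000))
    assert result == 499_500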

src/measured/_parser.py

Lines changed: 0 additions & 17 deletions
@@ -139,7 +139,6 @@ def match_examples(
     self, (UnexpectedToken, UnexpectedEOF)
 ) and isinstance(ut, (UnexpectedToken, UnexpectedEOF)):
     if ut.token == self.token: ##
-
         logger.debug("Exact Match at example [%s][%s]" % (i, j))
         return label

@@ -357,7 +356,6 @@ def classify(seq, key=None, value=None):
 def _deserialize(data, namespace, memo):
     if isinstance(data, dict):
         if "__type__" in data: ##
-
             class_ = namespace[data["__type__"]]
             return class_.deserialize(data, memo)
         elif "@" in data:
@@ -482,7 +480,6 @@ def get_regexp_width(expr):


 class Meta:
-
     empty: bool
     line: int
     column: int
@@ -642,7 +639,6 @@ def _apply_v_args(cls, visit_wrapper):
     assert mro[0] is cls
     libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)}
     for name, value in getmembers(cls):
-
         ##

         if name.startswith("_") or (
@@ -776,7 +772,6 @@ def _call_userfunc(self, tree, new_children=None):


 class TransformerChain(Generic[_Leaf_T, _Return_T]):
-
     transformers: "Tuple[Union[Transformer, TransformerChain], ...]"

     def __init__(self, *transformers: "Union[Transformer, TransformerChain]") -> None:
@@ -797,7 +792,6 @@ def __mul__(
 class Transformer_InPlace(Transformer):
     # --
     def _transform_tree(self, tree): ##
-
         return self._call_userfunc(tree)

     def transform(self, tree: Tree[_Leaf_T]) -> _Return_T:
@@ -1199,7 +1193,6 @@ def __eq__(self, other):


 class Pattern(Serialize, ABC):
-
     value: str
     flags: Collection[str]
     raw: Optional[str]
@@ -1309,7 +1302,6 @@ def __repr__(self):

     def user_repr(self) -> str:
         if self.name.startswith("__"): ##
-
             return self.pattern.raw or self.name
         else:
             return self.name
@@ -1522,7 +1514,6 @@ def _build_mres(self, terminals, max_size):
     try:
         mre = self.re_.compile(pattern, self.g_regex_flags)
     except AssertionError: ##
-
         return self._build_mres(terminals, max_size // 2)

     mres.append((mre, {i: n for n, i in mre.groupindex.items()}))
@@ -1604,7 +1595,6 @@ def make_lexer_state(self, text):


 class BasicLexer(Lexer):
-
     terminals: Collection[TerminalDef]
     ignore_types: FrozenSet[str]
     newline_types: FrozenSet[str]
@@ -1745,7 +1735,6 @@ def next_token(self, lex_state: LexerState, parser_state: Any = None) -> Token:


 class ContextualLexer(Lexer):
-
     lexers: Dict[str, BasicLexer]
     root_lexer: BasicLexer

@@ -2021,7 +2010,6 @@ def __call__(self, children):
     if filtered:
         filtered += children[i].children
     else: ##
-
         filtered = children[i].children
 else:
     filtered.append(children[i])
@@ -2045,7 +2033,6 @@ def __call__(self, children):
     if filtered:
         filtered += children[i].children
     else: ##
-
         filtered = children[i].children
 else:
     filtered.append(children[i])
@@ -2271,7 +2258,6 @@ def default_callback(data, children):
     default_callback = self.tree_class

 for rule, wrapper_chain in self.rule_builders:
-
     user_callback_name = (
         rule.alias or rule.options.template_source or rule.origin.name
     )
@@ -2631,7 +2617,6 @@ def __init__(self, lexer_conf, parser_conf, options, parser=None):
 ##

 if parser: ##
-
     self.parser = parser
 else:
     create_parser = _parser_creators.get(parser_conf.parser_type)
@@ -2704,7 +2689,6 @@ def parse_interactive(self, text=None, start=None):
 def _validate_frontend_args(parser, lexer) -> None:
     assert_config(parser, ("lalr", "earley", "cyk"))
     if not isinstance(lexer, type): ##
-
         expected = {
             "lalr": ("basic", "contextual"),
             "earley": ("basic", "dynamic", "dynamic_complete"),
@@ -3082,7 +3066,6 @@ def __init__(self, grammar: "Union[Grammar, str, IO[str]]", **options) -> None:
     self._load(cached_parser_data, **options)
     return
 except Exception: ##
-
     logger.exception(
         "Failed to load Lark from cache: %r. We will try to carry on."
         % cache_fn

tests/performance_harness.py

Lines changed: 0 additions & 100 deletions
This file was deleted.

tests/test_performance.py

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
+from pytest_benchmark.fixture import BenchmarkFixture
+
+from measured import One, Quantity
+from measured.astronomical import JulianYear
+from measured.si import Ampere, Meter, Ohm, Second, Volt
+from measured.us import Ounce, Ton
+
+
+def test_quantity_construction(benchmark: BenchmarkFixture) -> None:
+    def quantity_construction() -> Quantity:
+        return Quantity(1000001, Meter)
+
+    result = benchmark(quantity_construction)
+
+    assert result == Quantity(1000001, Meter)
+
+
+def test_low_dimensional_equality(benchmark: BenchmarkFixture) -> None:
+    a = Quantity(1000, Meter)
+    b = Quantity(1000, Meter)
+
+    def low_dimensional_equality() -> bool:
+        return bool(a == b)
+
+    assert benchmark(low_dimensional_equality) is True
+
+
+def test_high_dimensional_equality(benchmark: BenchmarkFixture) -> None:
+    a = Quantity(1000, Ohm)
+    b = Quantity(1000, Ohm)
+
+    def high_dimensional_equality() -> bool:
+        return bool(a == b)
+
+    assert benchmark(high_dimensional_equality) is True
+
+
+def test_computing_resistances(benchmark: BenchmarkFixture) -> None:
+    a = Quantity(1000, Ampere)
+    v = Quantity(1000, Volt)
+
+    def computing_resistances() -> Quantity:
+        return v / a
+
+    assert benchmark(computing_resistances) == Quantity(1, Ohm)
+
+
+def test_complicated_conversions(benchmark: BenchmarkFixture) -> None:
+    o = Quantity(1000, Ounce / (JulianYear * Ampere))
+    t = Quantity(1000, Ton / (Second * Ampere))
+
+    def divide() -> Quantity:
+        return (t / o).in_unit(One)
+
+    assert benchmark(divide).unit == One
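
A note on the fixture used above (per pytest-benchmark's documented behavior, not stated in this commit): benchmark(...) runs the wrapped callable for many rounds, reports timing statistics in the pytest summary, and returns the callable's return value, which is why each test can still assert on the computed quantity. Benchmarks can typically be run in isolation or skipped with pytest-benchmark's --benchmark-only and --benchmark-skip options.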

0 commit comments
