Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Consistent and modern type annotations #188

Merged
merged 4 commits into from
Jan 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions examples/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import shlex
import subprocess
import sys
from argparse import ArgumentParser
from argparse import ArgumentParser, Namespace
from dataclasses import dataclass

logging.basicConfig(format="%(message)s", level=logging.INFO)
Expand Down Expand Up @@ -102,7 +102,7 @@ def execute_example(example: Example, legacy_workspace: bool) -> Result:
return Result(example=cmd, success=False)


def cli():
def cli() -> Namespace:
parser = ArgumentParser()
parser.add_argument(
"--legacy-workspace",
Expand Down
3 changes: 2 additions & 1 deletion mypy.ini
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@ explicit_package_bases = true
pretty = true
# Third party dependencies are not picked up by mypy
ignore_missing_imports = true
# Unreliable due to false positives
warn_return_any = false
# Additional checks beyond default settings
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
warn_return_any = true
warn_unreachable = true
10 changes: 9 additions & 1 deletion ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ select = [
"UP",
# flake8-builtins
"A",
# flake8-annotations
"ANN",
# flake8-bugbear
"B",
# flake8-simplify
Expand All @@ -35,9 +37,15 @@ select = [
# Pylint
"PL",
# Ruff-specific
"RUF"
"RUF",
# Ensure we use modern typing syntax
"FA100",
]
ignore = [
# Annotating **kwargs has little value
"ANN003",
# Documenting the type of 'self' is too verbose and in most cases obvious anyway
"ANN101",
# It is fine that some lines are longer than the length limit
"E501",
# Number of function arguments is too opinionated
Expand Down
23 changes: 12 additions & 11 deletions src/analyze_includes/evaluate_includes.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

from pathlib import Path
from typing import List

from src.analyze_includes.parse_source import Include
from src.analyze_includes.result import Result
Expand All @@ -11,7 +12,7 @@


def does_include_match_available_files(
include_statement: str, include_paths: List[str], header_files: List[str]
include_statement: str, include_paths: list[str], header_files: list[str]
) -> bool:
for header in header_files:
for inc in include_paths:
Expand All @@ -21,17 +22,17 @@ def does_include_match_available_files(
return False


def _include_resolves_to_any_file(included_path: Path, files: List[str]) -> bool:
def _include_resolves_to_any_file(included_path: Path, files: list[str]) -> bool:
return any(included_path == Path(file).resolve() for file in files)


def _check_for_invalid_includes(
includes: List[Include],
dependencies: List[CcTarget],
includes: list[Include],
dependencies: list[CcTarget],
usage: UsageStatus,
target_under_inspection: CcTarget,
include_paths: List[str],
) -> List[Include]:
include_paths: list[str],
) -> list[Include]:
invalid_includes = []

for inc in includes:
Expand Down Expand Up @@ -82,11 +83,11 @@ def _check_for_invalid_includes(
return invalid_includes


def _check_for_unused_dependencies(dependencies: List[CcTarget]) -> List[str]:
def _check_for_unused_dependencies(dependencies: list[CcTarget]) -> list[str]:
return [dep.name for dep in dependencies if not dep.usage.is_used()]


def _check_for_public_deps_which_should_be_private(dependencies: SystemUnderInspection) -> List[str]:
def _check_for_public_deps_which_should_be_private(dependencies: SystemUnderInspection) -> list[str]:
return [dep.name for dep in dependencies.deps if dep.usage.usage == UsageStatus.PRIVATE]


Expand All @@ -106,8 +107,8 @@ def _filter_empty_dependencies(system_under_inspection: SystemUnderInspection) -


def evaluate_includes(
public_includes: List[Include],
private_includes: List[Include],
public_includes: list[Include],
private_includes: list[Include],
system_under_inspection: SystemUnderInspection,
ensure_private_deps: bool,
) -> Result:
Expand Down
2 changes: 1 addition & 1 deletion src/analyze_includes/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from src.analyze_includes.system_under_inspection import get_system_under_inspection


def cli():
def cli() -> Namespace:
parser = ArgumentParser()
parser.add_argument(
"--public_files",
Expand Down
5 changes: 3 additions & 2 deletions src/analyze_includes/parse_config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import json
from pathlib import Path
from typing import Optional

from src.analyze_includes.parse_source import IgnoredIncludes
from src.analyze_includes.std_header import STD_HEADER
Expand All @@ -10,7 +11,7 @@
IGNORED_PATTERNS_KEY = "ignore_include_patterns"


def get_ignored_includes(config_file: Optional[Path]) -> IgnoredIncludes:
def get_ignored_includes(config_file: Path | None) -> IgnoredIncludes:
ignored_paths = STD_HEADER
ignored_patterns = []
if config_file:
Expand Down
17 changes: 9 additions & 8 deletions src/analyze_includes/parse_source.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from __future__ import annotations

import re
from dataclasses import dataclass
from io import StringIO
from pathlib import Path
from typing import List, Optional

from pcpp.preprocessor import Action, OutputDirective, Preprocessor

Expand All @@ -13,7 +14,7 @@ class SimpleParsingPreprocessor(Preprocessor):
points for us is resolving branching statements (e.g. '#ifdef') to analyze the correct code parts.
"""

def on_include_not_found(self, is_malformed, is_system_include, curdir, includepath):
def on_include_not_found(self, is_malformed: bool, is_system_include: bool, curdir: str, includepath: str) -> None:
"""
We ignore missing include statements.

Expand Down Expand Up @@ -67,16 +68,16 @@ class IgnoredIncludes:
or headers chosen by the user.
"""

paths: List[str]
patterns: List[str]
paths: list[str]
patterns: list[str]

def is_ignored(self, include: str) -> bool:
is_ignored_path = include in self.paths
is_ignored_pattern = any(re.match(pattern, include) for pattern in self.patterns)
return is_ignored_path or is_ignored_pattern


def get_includes_from_file(file: Path, defines: List[str], include_paths: List[str]) -> List[Include]:
def get_includes_from_file(file: Path, defines: list[str], include_paths: list[str]) -> list[Include]:
"""
Parse a C/C++ file and extract include statements which are neither commented nor disabled through preprocessor
branching (e.g. #ifdef).
Expand Down Expand Up @@ -110,7 +111,7 @@ def get_includes_from_file(file: Path, defines: List[str], include_paths: List[s
return [Include(file=file, include=include.lstrip('"<').rstrip('">')) for include in included_paths]


def filter_includes(includes: List[Include], ignored_includes: IgnoredIncludes) -> List[Include]:
def filter_includes(includes: list[Include], ignored_includes: IgnoredIncludes) -> list[Include]:
"""
- deduplicate list entries
- throw away uninteresting includes (e.g. from standard library or ignored includes provided by the user)
Expand All @@ -120,8 +121,8 @@ def filter_includes(includes: List[Include], ignored_includes: IgnoredIncludes)


def get_relevant_includes_from_files(
files: Optional[List[str]], ignored_includes: IgnoredIncludes, defines: List[str], include_paths: List[str]
) -> List[Include]:
files: list[str] | None, ignored_includes: IgnoredIncludes, defines: list[str], include_paths: list[str]
) -> list[Include]:
all_includes = []
if files:
for file in files:
Expand Down
15 changes: 8 additions & 7 deletions src/analyze_includes/result.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass, field
from json import dumps
from typing import DefaultDict, List

from src.analyze_includes.parse_source import Include


@dataclass
class Result:
target: str
public_includes_without_dep: List[Include] = field(default_factory=list)
private_includes_without_dep: List[Include] = field(default_factory=list)
unused_deps: List[str] = field(default_factory=list)
unused_impl_deps: List[str] = field(default_factory=list)
deps_which_should_be_private: List[str] = field(default_factory=list)
public_includes_without_dep: list[Include] = field(default_factory=list)
private_includes_without_dep: list[Include] = field(default_factory=list)
unused_deps: list[str] = field(default_factory=list)
unused_impl_deps: list[str] = field(default_factory=list)
deps_which_should_be_private: list[str] = field(default_factory=list)
use_impl_deps: bool = False

def is_ok(self) -> bool:
Expand Down Expand Up @@ -59,7 +60,7 @@ def to_json(self) -> str:
return dumps(content, indent=2) + "\n"

@staticmethod
def _make_includes_map(includes: List[Include]) -> DefaultDict[str, List[str]]:
def _make_includes_map(includes: list[Include]) -> defaultdict[str, list[str]]:
includes_mapping = defaultdict(list)
for inc in includes:
includes_mapping[str(inc.file)].append(inc.include)
Expand Down
25 changes: 13 additions & 12 deletions src/analyze_includes/system_under_inspection.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from __future__ import annotations

import json
from dataclasses import dataclass, field
from enum import Enum, auto
from pathlib import Path
from typing import Any, Dict, List


class UsageStatus(Enum):
Expand Down Expand Up @@ -50,10 +51,10 @@ class CcTarget:
"""A cc_* rule target and the available information associated with it."""

name: str
header_files: List[str]
header_files: list[str]
usage: UsageStatusTracker = field(init=False)

def __post_init__(self):
def __post_init__(self) -> None:
self.usage = UsageStatusTracker()

def __repr__(self) -> str:
Expand All @@ -67,14 +68,14 @@ class SystemUnderInspection:
# Target under inspection
target_under_inspection: CcTarget
# Dependencies which are available to downstream dependencies of the target under inspection
deps: List[CcTarget]
deps: list[CcTarget]
# Dependencies which are NOT available to downstream dependencies of the target under inspection. Exists only for
# cc_library targets
impl_deps: List[CcTarget]
impl_deps: list[CcTarget]
# All include paths available to the target under inspection. Combines all kinds of includes.
include_paths: List[str]
include_paths: list[str]
# Defines influencing the preprocessor
defines: List[str]
defines: list[str]


def _make_cc_target(target_file: Path) -> CcTarget:
Expand All @@ -87,17 +88,17 @@ def _make_cc_target(target_file: Path) -> CcTarget:
return cc_target


def _cc_targets_from_deps(deps: List[Path]) -> List[CcTarget]:
def _cc_targets_from_deps(deps: list[Path]) -> list[CcTarget]:
return [_make_cc_target(dep) for dep in deps]


def _get_include_paths(target_info: Dict[str, Any]) -> List[str]:
def _get_include_paths(target_info: dict[str, list[str]]) -> list[str]:
"""
'.' represents the workspace root relative to which all paths in a Bazel workspace are defined. Our internal logic
does however expect an empty string for the "include relative to workspace root" case.
"""

def replace_dot(paths: List[str]) -> List[str]:
def replace_dot(paths: list[str]) -> list[str]:
return ["" if path == "." else path for path in paths]

return (
Expand All @@ -107,7 +108,7 @@ def replace_dot(paths: List[str]) -> List[str]:
)


def _get_defines(target_info: Dict[str, Any]) -> List[str]:
def _get_defines(target_info: dict[str, list[str]]) -> list[str]:
"""
Defines with values in BUILD files or the compiler CLI can be specified via '<DEFINE_TOKEN>=<VALUE>'. However, this
syntax is not valid for the preprocessor, which expects '<DEFINE_TOKEN> <VALUE>'.
Expand All @@ -116,7 +117,7 @@ def _get_defines(target_info: Dict[str, Any]) -> List[str]:


def get_system_under_inspection(
target_under_inspection: Path, deps: List[Path], impl_deps: List[Path]
target_under_inspection: Path, deps: list[Path], impl_deps: list[Path]
) -> SystemUnderInspection:
with open(target_under_inspection, encoding="utf-8") as target:
target_info = json.load(target)
Expand Down
Loading