
Commit

code satisfies UP rules (#513)
* fix UP rules

* fix typing union on py39

* fix

* fix round

* fix union typing

* fix round

* add eval-type-backport = {version = "^0.2.0", python = "3.9"}
samuelduchesne authored Sep 24, 2024
1 parent 580f8f8 commit 72e80b4
Showing 40 changed files with 174 additions and 186 deletions.
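For context, the "UP rules" in the title are presumably Ruff's pyupgrade (UP) rule family, judging by the hunks below: built-in generics instead of typing.List, PEP 604 unions instead of Optional/Union, argument-free super(), f-strings instead of %-formatting, and yield from. The sketch below is illustrative only, with made-up names rather than code from the repository.

# Hypothetical before/after of the pyupgrade-style rewrites applied in this commit.
from __future__ import annotations  # lets list[str] and str | None appear in annotations on Python 3.9

# Before:
#   from typing import List, Optional
#   def describe(items: List[str], label: Optional[str] = None) -> str:
#       return "%s: %s" % (label or "items", ", ".join(items))
def describe(items: list[str], label: str | None = None) -> str:
    # UP006/UP007: built-in generics and X | None; UP031/UP032: f-string instead of %-formatting
    return f"{label or 'items'}: {', '.join(items)}"

class LoggingDict(dict):
    def __init__(self):
        super().__init__()  # UP008: super() without explicit class and instance arguments

def flatten(nested: list[list[int]]):
    for inner in nested:
        yield from inner  # UP028: replaces the two-line "for x in inner: yield x"
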
8 changes: 5 additions & 3 deletions archetypal/__init__.py
@@ -4,9 +4,11 @@
# License: MIT, see full license in LICENSE.txt
# Web: https://github.com/samuelduchesne/archetypal
################################################################################
from __future__ import annotations

import logging as lg
from pathlib import Path
from typing import Any, ClassVar, List, Literal, Optional
from typing import Any, ClassVar, Literal

from energy_pandas.units import unit_registry

@@ -67,7 +69,7 @@ class Settings(BaseSettings, arbitrary_types_allowed=True, validate_assignment=T
log_filename: str = Field("archetypal")

# usual idfobjects
useful_idf_objects: List[str] = Field(
useful_idf_objects: list[str] = Field(
[
"WINDOWMATERIAL:GAS",
"WINDOWMATERIAL:GLAZING",
@@ -169,7 +171,7 @@ class Settings(BaseSettings, arbitrary_types_allowed=True, validate_assignment=T
"for ENERGYPLUS_VERSION in os.environ",
)

energyplus_location: Optional[DirectoryPath] = Field(
energyplus_location: DirectoryPath | None = Field(
None,
validation_alias="ENERGYPLUS_LOCATION",
description="Root directory of the EnergyPlus install.",
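A note on the "fix typing union on py39" and eval-type-backport entries in the commit message: with from __future__ import annotations, an annotation such as DirectoryPath | None on the Settings fields above is stored as a string, and pydantic must evaluate that string at runtime; plain Python 3.9 cannot evaluate the X | Y syntax, and the eval-type-backport dependency is what lets pydantic resolve it there (this reading is inferred from the commit message rather than stated in the diff). A minimal standard-library sketch of the underlying limitation:

from __future__ import annotations

import sys
from typing import get_type_hints


class Config:
    # Stored as the string "str | None" because of the __future__ import above.
    location: str | None = None


if sys.version_info >= (3, 10):
    print(get_type_hints(Config))  # evaluates cleanly, e.g. {'location': str | None}
else:
    try:
        print(get_type_hints(Config))
    except TypeError as err:
        # Python 3.9 cannot evaluate "str | None" at runtime; pydantic works around
        # this by delegating to eval-type-backport when that package is installed.
        print(f"PEP 604 union not evaluatable on 3.9: {err}")
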
2 changes: 1 addition & 1 deletion archetypal/eplus_interface/energy_plus.py
@@ -255,7 +255,7 @@ def success_callback(self):
pass
else:
log(
"Files generated at the end of the simulation: %s" % "\n".join(save_dir.files()),
"Files generated at the end of the simulation: {}".format("\n".join(save_dir.files())),
lg.DEBUG,
name=self.name,
)
2 changes: 1 addition & 1 deletion archetypal/eplus_interface/exceptions.py
@@ -36,7 +36,7 @@ class EnergyPlusVersionError(Exception):
"""EnergyPlus Version call error"""

def __init__(self, msg=None, idf_file=None, idf_version=None, ep_version=None):
super(EnergyPlusVersionError, self).__init__(None)
super().__init__(None)
self.msg = msg
self.idf_file = idf_file
self.idf_version = idf_version
5 changes: 2 additions & 3 deletions archetypal/eplus_interface/transition.py
@@ -116,8 +116,7 @@ def transitions(self) -> list:
@property
def transitions_generator(self):
"""Generate transitions."""
for transition in self.transitions:
yield transition
yield from self.transitions

def __str__(self):
"""Return string representation."""
@@ -143,7 +142,7 @@ class TransitionThread(Thread):

def __init__(self, idf, tmp, overwrite=False):
"""Initialize Thread."""
super(TransitionThread, self).__init__()
super().__init__()
self.overwrite = overwrite
self.p = None
self.std_out = None
6 changes: 3 additions & 3 deletions archetypal/eplus_interface/version.py
@@ -60,7 +60,7 @@ def __init__(self, version):
version = ".".join(map(str, (version.major, version.minor, version.micro)))
if isinstance(version, str) and "-" in version:
version = version.replace("-", ".")
super(EnergyPlusVersion, self).__init__(version)
super().__init__(version)
if self.dash not in self.valid_versions:
raise InvalidEnergyPlusVersion()

@@ -228,8 +228,8 @@ def get_eplus_basedirs():
return Path("/Applications").dirs("EnergyPlus*")
else:
warnings.warn(
"Archetypal is not compatible with %s. It is only compatible "
"with Windows, Linux or MacOs" % platform.system()
f"Archetypal is not compatible with {platform.system()}. It is only compatible "
"with Windows, Linux or MacOs"
)


2 changes: 1 addition & 1 deletion archetypal/idfclass/extensions.py
@@ -112,7 +112,7 @@ def makedict(self: Eplusdata, dictfile, fnamefobject):
# scream
if node == "":
continue
log("this node -%s-is not present in base dictionary" % node)
log(f"this node -{node}-is not present in base dictionary")

self.dt, self.dtls = dt, dtls
return dt, dtls
61 changes: 34 additions & 27 deletions archetypal/idfclass/idf.py
@@ -20,12 +20,14 @@
import uuid
import warnings
from collections import defaultdict
from collections.abc import Iterable
from io import IOBase, StringIO
from itertools import chain
from math import isclose
from typing import IO, Iterable, Literal, Optional, Tuple, Union
from typing import IO, ClassVar, Literal

from typing_extensions import ClassVar
import numpy as np
from sigfig import round

ReportingFrequency = Literal["Annual", "Monthly", "Daily", "Hourly", "Timestep"]

@@ -205,13 +207,13 @@ def __set_on_dependencies(self, key, value):
if key in self._independant_vars:
self._reset_dependant_vars(key)
key = f"_{key}"
super(IDF, self).__setattr__(key, value)
super().__setattr__(key, value)

def __init__(
self,
idfname: Optional[Union[str, IO, Path]] = None,
idfname: str | IO | Path | None = None,
epw=None,
as_version: Union[str, EnergyPlusVersion] = None,
as_version: str | EnergyPlusVersion = None,
annual=False,
design_day=False,
expandobjects=False,
@@ -230,7 +232,7 @@ def __init__(
output_directory=None,
outputtype="standard",
encoding=None,
iddname: Optional[Union[str, IO, Path]] = None,
iddname: str | IO | Path | None = None,
reporting_frequency: ReportingFrequency = "Monthly",
**kwargs,
):
@@ -617,7 +619,7 @@ def output_suffix(self, value):
self._output_suffix = value

@property
def idfname(self) -> Union[Path, StringIO]:
def idfname(self) -> Path | StringIO:
"""Path: The path of the active (parsed) idf model."""
if self._idfname is None:
if self.as_version is None:
@@ -834,7 +836,7 @@ def sim_id(self) -> str:

# endregion
@property
def sim_info(self) -> Optional[DataFrame]:
def sim_info(self) -> DataFrame | None:
"""DataFrame: Unique number generated for a simulation."""
if self.sql_file is not None:
with sqlite3.connect(self.sql_file) as conn:
@@ -845,7 +847,7 @@ def sim_info(self) -> Optional[DataFrame]:
return None

@property
def sim_timestamp(self) -> Union[str, Series]:
def sim_timestamp(self) -> str | Series:
"""Return the simulation timestamp or "Never" if not ran yet."""
if self.sim_info is None:
return "Never"
@@ -1488,7 +1490,7 @@ def savecopy(self, filename, lineendings="default", encoding="latin-1"):
Returns:
Path: The new file path.
"""
super(IDF, self).save(filename, lineendings, encoding)
super().save(filename, lineendings, encoding)
return Path(filename)

def copy(self):
@@ -1515,7 +1517,7 @@ def save(self, lineendings="default", encoding="latin-1", **kwargs):
Returns:
IDF: The IDF model
"""
super(IDF, self).save(filename=self.idfname, lineendings=lineendings, encoding=encoding)
super().save(filename=self.idfname, lineendings=lineendings, encoding=encoding)
log(f"saved '{self.name}' at '{self.idfname}'")
return self

@@ -1538,7 +1540,7 @@ def saveas(self, filename, lineendings="default", encoding="latin-1", inplace=Fa
Returns:
IDF: A new IDF object based on the new location file.
"""
super(IDF, self).save(filename=filename, lineendings=lineendings, encoding=encoding)
super().save(filename=filename, lineendings=lineendings, encoding=encoding)

import inspect

@@ -1719,7 +1721,7 @@ def wwr(self, azimuth_threshold=10, round_to=10):

def roundto(x, to=10.0):
"""Round up to closest `to` number."""
from builtins import round

if to and not math.isnan(x):
return int(round(x / to)) * to
@@ -1754,7 +1755,6 @@ def roundto(x, to=10.0):

# Create dataframe with wall_area, window_area and wwr as columns and azimuth
# as indexes
from sigfig import round

df = (
pd.DataFrame({"wall_area": total_surface_area, "window_area": total_window_area})
@@ -1763,8 +1763,15 @@ def roundto(x, to=10.0):
)
df.wall_area = df.wall_area.apply(round, decimals=1)
df.window_area = df.window_area.apply(round, decimals=1)
df["wwr"] = (df.window_area / df.wall_area).fillna(0).apply(round, 2)
df["wwr_rounded_%"] = (df.window_area / df.wall_area * 100).fillna(0).apply(lambda x: roundto(x, to=round_to))
df["wwr"] = (
(df.window_area / df.wall_area).replace([np.inf, -np.inf], np.nan).fillna(0).apply(round, decimals=2)
)
df["wwr_rounded_%"] = (
(df.window_area / df.wall_area * 100)
.replace([np.inf, -np.inf], np.nan)
.fillna(0)
.apply(lambda x: roundto(x, to=round_to))
)
return df

def space_heating_profile(
@@ -1897,7 +1904,7 @@ def custom_profile(
log(f"Retrieved {name} in {time.time() - start_time:,.2f} seconds")
return series

def newidfobject(self, key, **kwargs) -> Optional[EpBunch]:
def newidfobject(self, key, **kwargs) -> EpBunch | None:
"""Define EpBunch object and add to model.
The function will test if the object exists to prevent duplicates.
@@ -2040,7 +2047,7 @@ def anidfobject(self, key: str, aname: str = "", **kwargs) -> EpBunch:
abunch = obj2bunch(self.model, self.idd_info, obj)
if aname:
warnings.warn(
"The aname parameter should no longer be used (%s)." % aname,
f"The aname parameter should no longer be used ({aname}).",
UserWarning,
)
namebunch(abunch, aname)
@@ -2247,7 +2254,7 @@ def set_wwr(
# reviewed as of 2021-11-10.

try:
ggr: Optional[Idf_MSequence] = self.idfobjects["GLOBALGEOMETRYRULES"][0]
ggr: Idf_MSequence | None = self.idfobjects["GLOBALGEOMETRYRULES"][0]
except IndexError:
ggr = None

@@ -2275,12 +2282,12 @@ def set_wwr(
continue
# remove all subsurfaces
for ss in wall_subsurfaces:
self.rename(ss.key.upper(), ss.Name, "%s window" % wall.Name)
self.rename(ss.key.upper(), ss.Name, f"{wall.Name} window")
self.removeidfobject(ss)
coords = window_vertices_given_wall(wall, wwr)
window = self.newidfobject(
"FENESTRATIONSURFACE:DETAILED",
Name="%s window" % wall.Name,
Name=f"{wall.Name} window",
Surface_Type="Window",
Construction_Name=construction or "",
Building_Surface_Name=wall.Name,
@@ -2492,7 +2499,7 @@ def coords_are_truly_relative(self):
all_zone_origin_at_0 = False
return ggr_asks_for_relative and not all_zone_origin_at_0

def rotate(self, angle: Optional[float] = None, anchor: Tuple[float, float, float] | None = None):
def rotate(self, angle: float | None = None, anchor: tuple[float, float, float] | None = None):
"""Rotate the IDF counterclockwise around `anchor` by the angle given (degrees).
IF angle is None, rotates to Direction_of_Relative_North specified in Zone
@@ -2512,7 +2519,7 @@ def rotate(self, angle: Optional[float] = None, anchor: Tuple[float, float, floa

anchor = Vector3D(*anchor)
# Rotate the building
super(IDF, self).rotate(angle, anchor=anchor)
super().rotate(angle, anchor=anchor)
log(f"Geometries rotated by {angle} degrees around " f"{anchor or 'building centroid'}")

# after building is rotate, change the north axis and zone direction to zero.
@@ -2522,14 +2529,14 @@ def rotate(self, angle: Optional[float] = None, anchor: Tuple[float, float, floa
# Mark the model as rotated
self.rotated = True

def translate(self, vector: Tuple[float, float, float]):
def translate(self, vector: tuple[float, float, float]):
"""Move the IDF in the direction given by a vector."""
if isinstance(vector, tuple):
from geomeppy.geom.vectors import Vector2D

vector = Vector2D(*vector)

super(IDF, self).translate(vector=vector)
super().translate(vector=vector)
self.translated = True

@property
@@ -2612,13 +2619,13 @@ def _process_csv(file, working_dir, simulname):
tables_out.makedirs_p()
file.copy(tables_out / "%s_%s.csv" % (file.basename().stripext(), simulname))
return
log("try to store file %s in DataFrame" % file)
log(f"try to store file {file} in DataFrame")
try:
df = pd.read_csv(file, sep=",", encoding="us-ascii")
except ParserError:
pass
else:
log("file %s stored" % file)
log(f"file {file} stored")
return df


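A note on the wwr() hunk in idf.py above: when an orientation has zero wall area, window_area / wall_area produces inf rather than NaN, so .fillna(0) alone does not neutralise it and an infinite value could reach sigfig's round; the added .replace([np.inf, -np.inf], np.nan) step is what makes the fillna effective (and the positional round argument became an explicit decimals=2 keyword). A self-contained illustration of the division behaviour; the two-row frame is made up, not archetypal output:

import numpy as np
import pandas as pd

df = pd.DataFrame({"wall_area": [100.0, 0.0], "window_area": [30.0, 5.0]})
ratio = df.window_area / df.wall_area  # second element is inf because of 5.0 / 0.0
wwr = ratio.replace([np.inf, -np.inf], np.nan).fillna(0)
print(wwr.tolist())  # [0.3, 0.0] -- without the replace step, the inf would survive fillna(0)
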
2 changes: 1 addition & 1 deletion archetypal/idfclass/outputs.py
@@ -1,4 +1,4 @@
from typing import Iterable
from collections.abc import Iterable

from archetypal.idfclass.end_use_balance import EndUseBalance
from archetypal.idfclass.extensions import get_name_attribute
18 changes: 9 additions & 9 deletions archetypal/idfclass/sql.py
@@ -3,16 +3,16 @@
from __future__ import annotations

import logging
from collections.abc import Sequence
from datetime import timedelta
from sqlite3 import connect
from typing import List, Optional, Sequence, Union
from typing import Literal

import numpy as np
import pandas as pd
from energy_pandas import EnergyDataFrame
from pandas import to_datetime
from path import Path
from typing_extensions import Literal

from archetypal.utils import log

@@ -82,7 +82,7 @@ def values(self, environment_type: int = 3, units: str | None = None) -> EnergyD
class _SqlOutputs:
"""Represents all the available outputs from the Sql file."""

def __init__(self, file_path: str, available_outputs: List[tuple]):
def __init__(self, file_path: str, available_outputs: list[tuple]):
self._available_outputs = available_outputs
self._properties = {}

@@ -148,7 +148,7 @@ def tabular_data_keys(self):
return self._tabular_data_keys

@property
def available_outputs(self) -> List[tuple]:
def available_outputs(self) -> list[tuple]:
"""Get tuples (OutputName, ReportingFrequency) that can be requested.
Any of these outputs when input to data_collections_by_output_name will
@@ -229,9 +229,9 @@ def full_html_report(self):

def timeseries_by_name(
self,
variable_or_meter: Union[str, Sequence],
reporting_frequency: Union[_REPORTING_FREQUENCIES] = "Hourly",
environment_type: Union[Literal[1, 2, 3]] = 3,
variable_or_meter: str | Sequence,
reporting_frequency: _REPORTING_FREQUENCIES = "Hourly",
environment_type: Literal[1, 2, 3] = 3,
) -> EnergyDataFrame:
"""Get an EnergyDataFrame for specified meters and/or variables.
@@ -325,7 +325,7 @@ def timeseries_by_name(
return data

def tabular_data_by_name(
self, report_name: str, table_name: str, report_for_string: Optional[str] = None
self, report_name: str, table_name: str, report_for_string: str | None = None
) -> pd.DataFrame:
"""Get (ReportName, TableName) data as DataFrame.
@@ -371,7 +371,7 @@ def tabular_data_by_name(
pivoted = pivoted.apply(pd.to_numeric, errors="ignore")
return pivoted

def _extract_available_outputs(self) -> List:
def _extract_available_outputs(self) -> list:
"""Extract the list of all available outputs from the SQLite file."""
with connect(self.file_path) as conn:
cols = "Name, ReportingFrequency"
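Two smaller points in the sql.py hunks above: Union[...] with a single member is simply that member, so Union[_REPORTING_FREQUENCIES] and Union[Literal[1, 2, 3]] collapse to the bare annotation, and abstract containers such as Sequence and Iterable now come from collections.abc, where they have lived since the typing aliases were deprecated in Python 3.9. A short sketch; the Literal members are copied from the ReportingFrequency alias in idf.py shown earlier and may not match sql.py's actual _REPORTING_FREQUENCIES definition:

from __future__ import annotations

from collections.abc import Sequence
from typing import Literal, Union

_REPORTING_FREQUENCIES = Literal["Annual", "Monthly", "Daily", "Hourly", "Timestep"]

# A one-member Union is a no-op wrapper around its argument (Union[X] is documented as equivalent to X).
assert Union[_REPORTING_FREQUENCIES] == _REPORTING_FREQUENCIES


def first_frequency(requested: Sequence[str]) -> str | None:
    """Return the first requested frequency, if any (illustrative helper, not archetypal API)."""
    return requested[0] if requested else None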