Skip to content

Commit

Permalink
code satisfies C4 rules (#517)
Browse files Browse the repository at this point in the history
* fix TRY rules

* allow pytest as function in tox

* fixes an issue where p is not initialized

* fix tests

* fix UP rules

* fix typing union on py39

* fix E rules

* fix

* fix SIM rules

* fix C4 rules

* fix types

* fix typing
  • Loading branch information
samuelduchesne authored Oct 25, 2024
1 parent f6528a6 commit 9e7400c
Show file tree
Hide file tree
Showing 29 changed files with 491 additions and 488 deletions.
72 changes: 36 additions & 36 deletions archetypal/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,78 +89,78 @@ class Settings(BaseSettings, arbitrary_types_allowed=True, validate_assignment=T
# Ref: https://bigladdersoftware.com/epx/docs/8-3/output-details-and-examples
# /eplusout.sql.html#schedules-table

available_sqlite_tables: ClassVar[dict] = dict(
ComponentSizes={"PrimaryKey": ["ComponentSizesIndex"], "ParseDates": []},
ConstructionLayers={"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
Constructions={"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
Materials={"PrimaryKey": ["MaterialIndex"], "ParseDates": []},
NominalBaseboardHeaters={
available_sqlite_tables: ClassVar[dict] = {
"ComponentSizes": {"PrimaryKey": ["ComponentSizesIndex"], "ParseDates": []},
"ConstructionLayers": {"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
"Constructions": {"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
"Materials": {"PrimaryKey": ["MaterialIndex"], "ParseDates": []},
"NominalBaseboardHeaters": {
"PrimaryKey": ["NominalBaseboardHeaterIndex"],
"ParseDates": [],
},
NominalElectricEquipment={
"NominalElectricEquipment": {
"PrimaryKey": ["NominalElectricEquipmentIndex"],
"ParseDates": [],
},
NominalGasEquipment={
"NominalGasEquipment": {
"PrimaryKey": ["NominalGasEquipmentIndex"],
"ParseDates": [],
},
NominalHotWaterEquipment={
"NominalHotWaterEquipment": {
"PrimaryKey": ["NominalHotWaterEquipmentIndex"],
"ParseDates": [],
},
NominalInfiltration={
"NominalInfiltration": {
"PrimaryKey": ["NominalInfiltrationIndex"],
"ParseDates": [],
},
NominalLighting={"PrimaryKey": ["NominalLightingIndex"], "ParseDates": []},
NominalOtherEquipment={
"NominalLighting": {"PrimaryKey": ["NominalLightingIndex"], "ParseDates": []},
"NominalOtherEquipment": {
"PrimaryKey": ["NominalOtherEquipmentIndex"],
"ParseDates": [],
},
NominalPeople={"PrimaryKey": ["NominalPeopleIndex"], "ParseDates": []},
NominalSteamEquipment={
"NominalPeople": {"PrimaryKey": ["NominalPeopleIndex"], "ParseDates": []},
"NominalSteamEquipment": {
"PrimaryKey": ["NominalSteamEquipmentIndex"],
"ParseDates": [],
},
NominalVentilation={
"NominalVentilation": {
"PrimaryKey": ["NominalVentilationIndex"],
"ParseDates": [],
},
ReportData={"PrimaryKey": ["ReportDataIndex"], "ParseDates": []},
ReportDataDictionary={
"ReportData": {"PrimaryKey": ["ReportDataIndex"], "ParseDates": []},
"ReportDataDictionary": {
"PrimaryKey": ["ReportDataDictionaryIndex"],
"ParseDates": [],
},
ReportExtendedData={
"ReportExtendedData": {
"PrimaryKey": ["ReportExtendedDataIndex"],
"ParseDates": [],
},
RoomAirModels={"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
Schedules={"PrimaryKey": ["ScheduleIndex"], "ParseDates": []},
Surfaces={"PrimaryKey": ["SurfaceIndex"], "ParseDates": []},
SystemSizes={
"RoomAirModels": {"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
"Schedules": {"PrimaryKey": ["ScheduleIndex"], "ParseDates": []},
"Surfaces": {"PrimaryKey": ["SurfaceIndex"], "ParseDates": []},
"SystemSizes": {
"PrimaryKey": ["SystemSizesIndex"],
"ParseDates": {"PeakHrMin": "%m/%d %H:%M:%S"},
},
Time={"PrimaryKey": ["TimeIndex"], "ParseDates": []},
ZoneGroups={"PrimaryKey": ["ZoneGroupIndex"], "ParseDates": []},
Zones={"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
ZoneLists={"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
ZoneSizes={"PrimaryKey": ["ZoneSizesIndex"], "ParseDates": []},
ZoneInfoZoneLists={"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
Simulations={
"Time": {"PrimaryKey": ["TimeIndex"], "ParseDates": []},
"ZoneGroups": {"PrimaryKey": ["ZoneGroupIndex"], "ParseDates": []},
"Zones": {"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
"ZoneLists": {"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
"ZoneSizes": {"PrimaryKey": ["ZoneSizesIndex"], "ParseDates": []},
"ZoneInfoZoneLists": {"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
"Simulations": {
"PrimaryKey": ["SimulationIndex"],
"ParseDates": {"TimeStamp": {"format": "YMD=%Y.%m.%d %H:%M"}},
},
EnvironmentPeriods={"PrimaryKey": ["EnvironmentPeriodIndex"], "ParseDates": []},
TabularData={"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
Strings={"PrimaryKey": ["StringIndex"], "ParseDates": []},
StringTypes={"PrimaryKey": ["StringTypeIndex"], "ParseDates": []},
TabularDataWithStrings={"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
Errors={"PrimaryKey": ["ErrorIndex"], "ParseDates": []},
)
"EnvironmentPeriods": {"PrimaryKey": ["EnvironmentPeriodIndex"], "ParseDates": []},
"TabularData": {"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
"Strings": {"PrimaryKey": ["StringIndex"], "ParseDates": []},
"StringTypes": {"PrimaryKey": ["StringTypeIndex"], "ParseDates": []},
"TabularDataWithStrings": {"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
"Errors": {"PrimaryKey": ["ErrorIndex"], "ParseDates": []},
}

zone_weight: ZoneWeight = ZoneWeight(n=0)

Expand Down
22 changes: 11 additions & 11 deletions archetypal/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from .eplus_interface.exceptions import EnergyPlusVersionError
from .eplus_interface.version import EnergyPlusVersion

CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]}


class CliConfig:
Expand Down Expand Up @@ -297,14 +297,14 @@ def transition(idf, to_version, cores, yes):

to_version = to_version.dash
rundict = {
file: dict(
idfname=file,
as_version=to_version,
check_required=False,
check_length=False,
overwrite=overwrite,
prep_outputs=False,
)
file: {
"idfname": file,
"as_version": to_version,
"check_required": False,
"check_length": False,
"overwrite": overwrite,
"prep_outputs": False,
}
for i, file in enumerate(file_paths)
}
results = parallel_process(
Expand Down Expand Up @@ -352,7 +352,7 @@ def set_filepaths(idf):
file_paths = () # Placeholder for tuple of paths
for file_or_path in idf:
if file_or_path.isfile(): # if a file, concatenate into file_paths
file_paths += tuple([file_or_path])
file_paths += (file_or_path,)
elif file_or_path.isdir(): # if a directory, walkdir (recursive) and get *.idf
file_paths += tuple(file_or_path.walkfiles("*.idf"))
else:
Expand All @@ -368,7 +368,7 @@ def set_filepaths(idf):
pattern = file_or_path.basename()
file_paths += tuple(Path(root).files(pattern))

file_paths = set([f.relpath().expand() for f in file_paths]) # Only keep unique
file_paths = {f.relpath().expand() for f in file_paths} # Only keep unique
# values
if file_paths:
return file_paths
Expand Down
2 changes: 1 addition & 1 deletion archetypal/idfclass/extensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def get_default(self: EpBunch, name):
@extend_class(EpBunch)
def to_dict(self: EpBunch):
"""Get the dict representation of the EpBunch."""
return {k: v for k, v in zip(self.fieldnames, self.fieldvalues)}
return dict(zip(self.fieldnames, self.fieldvalues))


@extend_class(EpBunch)
Expand Down
14 changes: 6 additions & 8 deletions archetypal/idfclass/idf.py
Original file line number Diff line number Diff line change
Expand Up @@ -408,7 +408,7 @@ def from_example_files(cls, example_name, epw=None, **kwargs):
try:
file = next(iter(Pathlib(example_files_dir).rglob(f"{example_name.stem}.idf")))
except StopIteration as e:
full_list = list(map(lambda x: str(x.name), example_files_dir.files("*.idf")))
full_list = [str(x.name) for x in example_files_dir.files("*.idf")]
raise ValueError(f"Choose from: {sorted(full_list)}") from e
if epw is not None:
epw = Path(epw)
Expand All @@ -418,7 +418,7 @@ def from_example_files(cls, example_name, epw=None, **kwargs):
try:
epw = next(iter(Pathlib(dir_weather_data_).rglob(f"{epw.stem}.epw")))
except StopIteration as e:
full_list = list(map(lambda x: str(x.name), dir_weather_data_.files("*.epw")))
full_list = [str(x.name) for x in dir_weather_data_.files("*.epw")]
raise ValueError(f"Choose EPW from: {sorted(full_list)}") from e
return cls(file, epw=epw, **kwargs)

Expand Down Expand Up @@ -1542,9 +1542,7 @@ def saveas(self, filename, lineendings="default", encoding="latin-1", inplace=Fa
import inspect

sig = inspect.signature(IDF.__init__)
kwargs = {
key: getattr(self, key) for key in [a for a in sig.parameters] if key not in ["self", "idfname", "kwargs"]
}
kwargs = {key: getattr(self, key) for key in list(sig.parameters) if key not in ["self", "idfname", "kwargs"]}

as_idf = IDF(filename, **kwargs)
# copy simulation_dir over to new location
Expand Down Expand Up @@ -2375,7 +2373,7 @@ def to_world(self):
if "world" in [o.Coordinate_System.lower() for o in self.idfobjects["GLOBALGEOMETRYRULES"]] or self.translated:
log("Model already set as World coordinates", level=lg.WARNING)
return
zone_angles = set(z.Direction_of_Relative_North or 0 for z in self.idfobjects["ZONE"])
zone_angles = {z.Direction_of_Relative_North or 0 for z in self.idfobjects["ZONE"]}
# If Zones have Direction_of_Relative_North != 0, model needs to be rotated
# before translation.
if all(angle != 0 for angle in zone_angles):
Expand All @@ -2397,7 +2395,7 @@ def to_world(self):
}
surfaces = {s.Name.upper(): s for s in self.getsurfaces()}
subsurfaces = self.getsubsurfaces()
daylighting_refpoints = [p for p in self.idfobjects["DAYLIGHTING:REFERENCEPOINT"]]
daylighting_refpoints = list(self.idfobjects["DAYLIGHTING:REFERENCEPOINT"])
attached_shading_surf_names = []
for g in self.idd_index["ref2names"]["AttachedShadingSurfNames"]:
for item in self.idfobjects[g]:
Expand Down Expand Up @@ -2501,7 +2499,7 @@ def rotate(self, angle: float | None = None, anchor: tuple[float, float, float]
if not angle:
bldg_angle = self.idfobjects["BUILDING"][0].North_Axis or 0
log(f"Building North Axis = {bldg_angle}", level=lg.DEBUG)
zone_angles = set(z.Direction_of_Relative_North for z in self.idfobjects["ZONE"])
zone_angles = {z.Direction_of_Relative_North for z in self.idfobjects["ZONE"]}
assert len(zone_angles) == 1, "Not all zone have the same Direction_of_Relative_North"
zone_angle, *_ = zone_angles
zone_angle = zone_angle or 0
Expand Down
22 changes: 11 additions & 11 deletions archetypal/idfclass/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,10 +115,10 @@ def __init__(
"""
self.idf = idf
self.reporting_frequency = reporting_frequency
self.output_variables = set(a.Variable_Name for a in idf.idfobjects["Output:Variable".upper()])
self.output_meters = set(
self.output_variables = {a.Variable_Name for a in idf.idfobjects["Output:Variable".upper()]}
self.output_meters = {
(get_name_attribute(a), a.Reporting_Frequency) for a in idf.idfobjects["Output:Meter".upper()]
)
}
self.other_outputs = outputs

self.output_variables += tuple((v, reporting_frequency) for v in variables)
Expand Down Expand Up @@ -289,7 +289,7 @@ def add_basics(self):

def add_schedules(self):
"""Adds Schedules object"""
outputs = [{"key": "Output:Schedules".upper(), **dict(Key_Field="Hourly")}]
outputs = [{"key": "Output:Schedules".upper(), **{"Key_Field": "Hourly"}}]
for output in outputs:
self._other_outputs.append(output)
return self
Expand All @@ -309,7 +309,7 @@ def add_meter_variables(self, format="IDF"):
Returns:
Outputs: self
"""
outputs = [dict(key="Output:VariableDictionary".upper(), Key_Field=format)]
outputs = [{"key": "Output:VariableDictionary".upper(), "Key_Field": format}]
for output in outputs:
self._other_outputs.append(output)
return self
Expand Down Expand Up @@ -338,7 +338,7 @@ def add_summary_report(self, summary="AllSummary"):
outputs = [
{
"key": "Output:Table:SummaryReports".upper(),
**dict(Report_1_Name=summary),
**{"Report_1_Name": summary},
}
]
for output in outputs:
Expand All @@ -361,7 +361,7 @@ def add_sql(self, sql_output_style="SimpleAndTabular"):
Returns:
Outputs: self
"""
outputs = [{"key": "Output:SQLite".upper(), **dict(Option_Type=sql_output_style)}]
outputs = [{"key": "Output:SQLite".upper(), **{"Option_Type": sql_output_style}}]

for output in outputs:
self._other_outputs.append(output)
Expand Down Expand Up @@ -390,7 +390,7 @@ def add_output_control(self, output_control_table_style="CommaAndHTML"):
outputs = [
{
"key": "OutputControl:Table:Style".upper(),
**dict(Column_Separator=output_control_table_style),
**{"Column_Separator": output_control_table_style},
}
]

Expand Down Expand Up @@ -452,7 +452,7 @@ def add_dxf(self):
outputs = [
{
"key": "Output:Surfaces:Drawing".upper(),
**dict(Report_Type="DXF", Report_Specifications_1="ThickPolyline"),
**{"Report_Type": "DXF", "Report_Specifications_1": "ThickPolyline"},
}
]
for output in outputs:
Expand Down Expand Up @@ -704,12 +704,12 @@ def apply(self):
for variable, reporting_frequency in self.output_variables:
self.idf.newidfobject(
key="Output:Variable".upper(),
**dict(Variable_Name=variable, Reporting_Frequency=reporting_frequency),
**{"Variable_Name": variable, "Reporting_Frequency": reporting_frequency},
)
for meter, reporting_frequency in self.output_meters:
self.idf.newidfobject(
key="Output:Meter".upper(),
**dict(Key_Name=meter, Reporting_Frequency=reporting_frequency),
**{"Key_Name": meter, "Reporting_Frequency": reporting_frequency},
)
for output in self.other_outputs:
key = output.pop("key", None)
Expand Down
4 changes: 2 additions & 2 deletions archetypal/schedule.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,7 @@ def get_compact_ep_schedule_values(epbunch, start_date, strict) -> np.ndarray:
from_time = "00:00"
how_interpolate = None
for field in fields:
if any([spe in field.lower() for spe in field_sets]):
if any(spe in field.lower() for spe in field_sets):
f_set, hour, minute, value = _ScheduleParser._field_interpreter(field, epbunch.Name)

if f_set.lower() == "through":
Expand Down Expand Up @@ -1367,7 +1367,7 @@ def to_year_week_day(self):
blocks = {}
from_date = datetime(self.year, 1, 1)
bincount = [sum(1 for _ in group) for key, group in groupby(nws + 1) if key]
week_order = {i: v for i, v in enumerate(np.array([key for key, group in groupby(nws + 1) if key]) - 1)}
week_order = dict(enumerate(np.array([key for key, group in groupby(nws + 1) if key]) - 1))
for i, (week_n, count) in enumerate(zip(week_order, bincount)):
week_id = list(dict_week)[week_order[i]]
to_date = from_date + timedelta(days=int(count * 7), hours=-1)
Expand Down
38 changes: 19 additions & 19 deletions archetypal/template/building_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,25 +560,25 @@ def mapping(self, validate=False):
if validate:
self.validate()

return dict(
Core=self.Core,
Lifespan=self.Lifespan,
PartitionRatio=self.PartitionRatio,
Perimeter=self.Perimeter,
Structure=self.Structure,
Windows=self.Windows,
Category=self.Category,
Comments=self.Comments,
DataSource=self.DataSource,
Name=self.Name,
YearFrom=self.YearFrom,
YearTo=self.YearTo,
Country=self.Country,
ClimateZone=self.ClimateZone,
Authors=self.Authors,
AuthorEmails=self.AuthorEmails,
Version=self.Version,
)
return {
"Core": self.Core,
"Lifespan": self.Lifespan,
"PartitionRatio": self.PartitionRatio,
"Perimeter": self.Perimeter,
"Structure": self.Structure,
"Windows": self.Windows,
"Category": self.Category,
"Comments": self.Comments,
"DataSource": self.DataSource,
"Name": self.Name,
"YearFrom": self.YearFrom,
"YearTo": self.YearTo,
"Country": self.Country,
"ClimateZone": self.ClimateZone,
"Authors": self.Authors,
"AuthorEmails": self.AuthorEmails,
"Version": self.Version,
}

def get_ref(self, ref):
"""Get item matching reference id.
Expand Down
Loading

0 comments on commit 9e7400c

Please sign in to comment.