From f20fd546cedc93a9c08ce970a14c12ed4727bdb6 Mon Sep 17 00:00:00 2001 From: Samuel Letellier-Duchesne Date: Fri, 25 Oct 2024 16:33:14 -0400 Subject: [PATCH] code satisfies B rules (#519) * fix B rules * fix type * idf.simulate return self * fix test * zip for py39 no strict field * config --- archetypal/cli.py | 2 +- archetypal/eplus_interface/version.py | 14 ++++--- archetypal/idfclass/end_use_balance.py | 2 +- archetypal/idfclass/idf.py | 15 +++++--- archetypal/idfclass/meters.py | 2 +- archetypal/idfclass/util.py | 2 +- archetypal/idfclass/variables.py | 2 +- archetypal/plot.py | 4 +- archetypal/schedule.py | 37 ++++++++++--------- archetypal/simple_glazing.py | 4 +- .../template/materials/glazing_material.py | 2 +- archetypal/template/umi_base.py | 4 +- archetypal/umi_template.py | 8 ++-- archetypal/zone_graph.py | 2 +- pyproject.toml | 6 ++- tests/test_idfclass.py | 3 +- tests/test_umi.py | 8 ++-- tests/test_zonegraph.py | 14 +++---- 18 files changed, 71 insertions(+), 60 deletions(-) diff --git a/archetypal/cli.py b/archetypal/cli.py index 96e2a145..bfd78755 100644 --- a/archetypal/cli.py +++ b/archetypal/cli.py @@ -364,7 +364,7 @@ def set_filepaths(idf): settings.logs_folder, ] top = file_or_path.abspath().dirname() - for root, dirs, files in walkdirs(top, excluded_dirs): + for root, _, _ in walkdirs(top, excluded_dirs): pattern = file_or_path.basename() file_paths += tuple(Path(root).files(pattern)) diff --git a/archetypal/eplus_interface/version.py b/archetypal/eplus_interface/version.py index e23f88e3..3cf70e59 100644 --- a/archetypal/eplus_interface/version.py +++ b/archetypal/eplus_interface/version.py @@ -113,7 +113,7 @@ def current_install_dir(self): raise EnergyPlusVersionError(f"EnergyPlusVersion {self.dash} is not installed.") from e @property - def tuple(self) -> tuple: # noqa: A003 + def tuple(self) -> tuple[int, int, int]: """Return the version number as a tuple: (major, minor, micro).""" return self.major, self.minor, self.micro @@ -154,7 
+154,7 @@ def valid_idd_paths(self, value): if not value: try: basedirs_ = [] - for version, basedir in self.install_locations.items(): + for _, basedir in self.install_locations.items(): updater_ = basedir / "PreProcess" / "IDFVersionUpdater" if updater_.exists(): basedirs_.append(updater_.files("*.idd")) @@ -176,9 +176,9 @@ def valid_idd_paths(self, value): if match is None: # Match the version in the whole path match = re.search(r"\d+(-\d+)+", iddname) - version = match.group() + version = match.group() - value[version] = iddname + value[version] = iddname self._valid_paths = dict(sorted(value.items())) @classmethod @@ -222,7 +222,8 @@ def get_eplus_basedirs(): else: warnings.warn( f"Archetypal is not compatible with {platform.system()}. It is only compatible " - "with Windows, Linux or MacOs" + "with Windows, Linux or MacOs", + stacklevel=2, ) @@ -239,5 +240,6 @@ def warn_if_not_compatible(): warnings.warn( "No installation of EnergyPlus could be detected on this " "machine. Please install EnergyPlus from https://energyplus.net before " - "using archetypal" + "using archetypal", + stacklevel=2, ) diff --git a/archetypal/idfclass/end_use_balance.py b/archetypal/idfclass/end_use_balance.py index 180397af..8708f7f2 100644 --- a/archetypal/idfclass/end_use_balance.py +++ b/archetypal/idfclass/end_use_balance.py @@ -500,7 +500,7 @@ def to_df(self, separate_gains_and_losses=False, level="KeyValue"): component_df = getattr(self, component) if not component_df.empty: summary_by_component[component] = component_df.sum(level=level, axis=1).sort_index(axis=1) - for (zone_name, surface_type), data in self.opaque_flow.groupby( + for (_zone_name, surface_type), data in self.opaque_flow.groupby( level=["Zone_Name", "Surface_Type"], axis=1 ): summary_by_component[surface_type] = data.sum(level="Zone_Name", axis=1).sort_index(axis=1) diff --git a/archetypal/idfclass/idf.py b/archetypal/idfclass/idf.py index 58f9560e..e4a6c7a9 100644 --- a/archetypal/idfclass/idf.py +++ 
b/archetypal/idfclass/idf.py @@ -1413,7 +1413,7 @@ def simulate(self, force=False, **kwargs): e = expandobjects_thread.exception if e is not None: raise e - if expandobjects_thread.cancelled: + elif expandobjects_thread.cancelled: return self # Run the Basement preprocessor program if necessary @@ -1432,7 +1432,7 @@ def simulate(self, force=False, **kwargs): e = basement_thread.exception if e is not None: raise e - if basement_thread.cancelled: + elif basement_thread.cancelled: return self # Run the Slab preprocessor program if necessary @@ -1452,7 +1452,7 @@ def simulate(self, force=False, **kwargs): e = slab_thread.exception if e is not None: raise e - if slab_thread.cancelled: + elif slab_thread.cancelled: return self # Run the energyplus program @@ -1471,7 +1471,9 @@ def simulate(self, force=False, **kwargs): e = running_simulation_thread.exception if e is not None: raise e - return self + elif running_simulation_thread.cancelled: + return self + return self def savecopy(self, filename, lineendings="default", encoding="latin-1"): """Save a copy of the file with the filename passed. @@ -2040,6 +2042,7 @@ def anidfobject(self, key: str, aname: str = "", **kwargs) -> EpBunch: warnings.warn( f"The aname parameter should no longer be used ({aname}).", UserWarning, + stacklevel=2, ) namebunch(abunch, aname) for k, v in kwargs.items(): @@ -2202,7 +2205,7 @@ def rename(self, objkey, objname, newname): for refname in refnames: objlists = eppy.modeleditor.getallobjlists(self, refname) # [('OBJKEY', refname, fieldindexlist), ...] 
- for robjkey, refname, fieldindexlist in objlists: + for robjkey, _refname, fieldindexlist in objlists: idfobjects = self.idfobjects[robjkey] for idfobject in idfobjects: for findex in fieldindexlist: # for each field @@ -2405,7 +2408,7 @@ def to_world(self): for subsurf in subsurfaces: zone_name = surfaces[subsurf.Building_Surface_Name.upper()].Zone_Name translate([subsurf], zone_origin[zone_name.upper()]) - for surf_name, surf in surfaces.items(): + for _surf_name, surf in surfaces.items(): translate([surf], zone_origin[surf.Zone_Name.upper()]) for day in daylighting_refpoints: zone_name = day.Zone_or_Space_Name diff --git a/archetypal/idfclass/meters.py b/archetypal/idfclass/meters.py index 54067a6e..127a970a 100644 --- a/archetypal/idfclass/meters.py +++ b/archetypal/idfclass/meters.py @@ -133,7 +133,7 @@ def __init__(self, idf, meters_dict: dict): self._idf = idf self._properties = {} - for i, meter in meters_dict.items(): + for _i, meter in meters_dict.items(): meter_name = meter["Key_Name"].replace(":", "__").replace(" ", "_") self._properties[meter_name] = Meter(idf, meter) setattr(self, meter_name, self._properties[meter_name]) diff --git a/archetypal/idfclass/util.py b/archetypal/idfclass/util.py index c45b2997..2b728805 100644 --- a/archetypal/idfclass/util.py +++ b/archetypal/idfclass/util.py @@ -60,7 +60,7 @@ def hash_model(idfname, **kwargs): hasher.update(buf) # Hashing the kwargs as well - for k, v in kwargs.items(): + for _k, v in kwargs.items(): if isinstance(v, (str, bool)): hasher.update(v.__str__().encode("utf-8")) elif isinstance(v, list): diff --git a/archetypal/idfclass/variables.py b/archetypal/idfclass/variables.py index d8aa9094..d6ea8619 100644 --- a/archetypal/idfclass/variables.py +++ b/archetypal/idfclass/variables.py @@ -110,7 +110,7 @@ def __init__(self, idf, variables_dict: dict): self._idf = idf self._properties = {} - for i, variable in variables_dict.items(): + for _i, variable in variables_dict.items(): variable_name = 
self.normalize_output_name(variable["Variable_Name"]) self._properties[variable_name] = Variable(idf, variable) setattr(self, variable_name, self._properties[variable_name]) diff --git a/archetypal/plot.py b/archetypal/plot.py index 703675b0..592c2b8a 100644 --- a/archetypal/plot.py +++ b/archetypal/plot.py @@ -71,10 +71,10 @@ def save_and_show(fig, ax, save, show, close, filename, file_format, dpi, axis_o if extent is None: if len(ax) == 1: if axis_off: - for ax in ax: + for _ax in ax: # if axis is turned off, constrain the saved # figure's extent to the interior of the axis - extent = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted()) + extent = _ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted()) else: pass fig.savefig( diff --git a/archetypal/schedule.py b/archetypal/schedule.py index 660a0252..6995f203 100644 --- a/archetypal/schedule.py +++ b/archetypal/schedule.py @@ -287,7 +287,7 @@ def get_compact_weekly_ep_schedule_values(epbunch, start_date, index=None, stric if not weekly_schedules.loc[how].empty: # Loop through days and replace with day:schedule values days = [] - for name, day in weekly_schedules.loc[how].groupby(pd.Grouper(freq="D")): + for _name, day in weekly_schedules.loc[how].groupby(pd.Grouper(freq="D")): if not day.empty: ref = epbunch.get_referenced_object(f"ScheduleDay_Name_{i + 1}") day.loc[:] = _ScheduleParser.get_schedule_values( @@ -564,7 +564,7 @@ def get_yearly_ep_schedule_values(cls, epbunch, start_date, strict) -> np.ndarra how = pd.IndexSlice[start_date:end_date] weeks = [] - for name, week in hourly_values.loc[how].groupby(pd.Grouper(freq="168h")): + for _name, week in hourly_values.loc[how].groupby(pd.Grouper(freq="168h")): if not week.empty: try: week.loc[:] = cls.get_schedule_values( @@ -933,15 +933,18 @@ def special_day(schedule_epbunch, field, slicer_, strict, start_date): special_day_types = ["holiday", "customday1", "customday2"] dds = 
schedule_epbunch.theidf.idfobjects["RunPeriodControl:SpecialDays".upper()] - dd = [ - dd for dd in dds if dd.Special_Day_Type.lower() == field or dd.Special_Day_Type.lower() in special_day_types + special_days = [ + special_day + for special_day in dds + if special_day.Special_Day_Type.lower() == field + or special_day.Special_Day_Type.lower() in special_day_types ] - if len(dd) > 0: - for dd in dd: + if len(special_days) > 0: + for special_day in special_days: # can have more than one special day types - field = dd.Start_Date + field = special_day.Start_Date special_day_start_date = _ScheduleParser._date_field_interpretation(field, start_date) - duration = int(dd.Duration) + duration = int(special_day.Duration) to_date = special_day_start_date + timedelta(days=duration) + timedelta(hours=-1) sp_slicer_.loc[special_day_start_date:to_date] = True @@ -969,12 +972,12 @@ def design_day(schedule_epbunch, field, slicer_, start_date, strict): sp_slicer_ = slicer_.copy() sp_slicer_.loc[:] = False dds = schedule_epbunch.theidf.idfobjects["SizingPeriod:DesignDay".upper()] - dd = [dd for dd in dds if dd.Day_Type.lower() == field] - if len(dd) > 0: - for dd in dd: + design_days = [dd for dd in dds if dd.Day_Type.lower() == field] + if len(design_days) > 0: + for design_day in design_days: # should have found only one design day matching the Day Type - month = dd.Month - day = dd.Day_of_Month + month = design_day.Month + day = design_day.Day_of_Month data = str(month) + "/" + str(day) ep_start_date = _ScheduleParser._date_field_interpretation(data, start_date) ep_orig = datetime(start_date.year, 1, 1) @@ -993,7 +996,7 @@ def design_day(schedule_epbunch, field, slicer_, start_date, strict): f"needed for schedule with Day Type '{field.capitalize()}'" ) raise ValueError(msg) - data = [dd[0].Month, dd[0].Day_of_Month] + data = [design_days[0].Month, design_days[0].Day_of_Month] date = "/".join([str(item).zfill(2) for item in data]) date = 
_ScheduleParser._date_field_interpretation(date, start_date) return lambda x: x.index == date @@ -1157,12 +1160,12 @@ def all_values(self, value): self._values = validators.iterable(value, maximum_length=8760) @property - def max(self): # noqa: A003 + def max(self): """Get the maximum value of the schedule.""" return max(self.all_values) @property - def min(self): # noqa: A003 + def min(self): """Get the minimum value of the schedule.""" return min(self.all_values) @@ -1368,7 +1371,7 @@ def to_year_week_day(self): from_date = datetime(self.year, 1, 1) bincount = [sum(1 for _ in group) for key, group in groupby(nws + 1) if key] week_order = dict(enumerate(np.array([key for key, group in groupby(nws + 1) if key]) - 1)) - for i, (week_n, count) in enumerate(zip(week_order, bincount)): + for i, (_, count) in enumerate(zip(week_order, bincount)): week_id = list(dict_week)[week_order[i]] to_date = from_date + timedelta(days=int(count * 7), hours=-1) blocks[i] = YearSchedulePart( diff --git a/archetypal/simple_glazing.py b/archetypal/simple_glazing.py index d31b794a..5eb56d32 100644 --- a/archetypal/simple_glazing.py +++ b/archetypal/simple_glazing.py @@ -103,10 +103,10 @@ def calc_simple_glazing(shgc, u_factor, visible_transmittance=None): # sanity checks if T_vis + R_vis_f >= 1.0: - warnings.warn("T_vis + R_vis_f > 1", UserWarning) + warnings.warn("T_vis + R_vis_f > 1", UserWarning, stacklevel=2) T_vis -= (T_vis + R_vis_f - 1) * 1.1 if T_vis + R_vis_b >= 1.0: - warnings.warn("T_vis + R_vis_b > 1", UserWarning) + warnings.warn("T_vis + R_vis_b > 1", UserWarning, stacklevel=2) T_vis -= (T_vis + R_vis_b - 1) * 1.1 # Last Step. 
Saving results to dict diff --git a/archetypal/template/materials/glazing_material.py b/archetypal/template/materials/glazing_material.py index 9c8f92e9..d0d25fa4 100644 --- a/archetypal/template/materials/glazing_material.py +++ b/archetypal/template/materials/glazing_material.py @@ -254,7 +254,7 @@ def combine(self, other, weights=None, allow_duplicates=False): new_attr = {} for attr, value in self.mapping().items(): if attr not in ["Comments", "DataSource"]: - if isinstance(value, int | float) or isinstance(other, int | float): + if isinstance(value, (int, float)) or isinstance(other, (int, float)): new_attr[attr] = UmiBase.float_mean(self, other, attr=attr, weights=weights) elif isinstance(value, str) or isinstance(other, str): new_attr[attr] = UmiBase._str_mean(self, other, attr=attr, append=False) diff --git a/archetypal/template/umi_base.py b/archetypal/template/umi_base.py index 1c995113..6e75c56b 100644 --- a/archetypal/template/umi_base.py +++ b/archetypal/template/umi_base.py @@ -101,12 +101,12 @@ def Name(self, value): self._name = validators.string(value, coerce_value=True) @property - def id(self): # noqa: A003 + def id(self): """Get or set the id.""" return self._id @id.setter - def id(self, value): # noqa: A003 + def id(self, value): if value is None: value = id(self) self._id = validators.string(value, coerce_value=True) diff --git a/archetypal/umi_template.py b/archetypal/umi_template.py index b83e1e9d..77321092 100644 --- a/archetypal/umi_template.py +++ b/archetypal/umi_template.py @@ -161,7 +161,7 @@ def __getitem__(self, item): def __add__(self, other: UmiTemplateLibrary): """Combined""" - for key, group in other: + for _, group in other: # for each group items for component in group: component.id = None # Reset the component's id @@ -180,7 +180,7 @@ def _clear_components_list(self, except_groups=None): except_groups = [] exception = ["BuildingTemplates"] exception.extend(except_groups) - for key, group in self: + for key, _ in self: if key 
not in exception: setattr(self, key, []) @@ -188,7 +188,7 @@ def _clear_components_list(self, except_groups=None): def object_list(self): """Get list of all objects in self, including orphaned objects.""" objs = [] - for name, group in self: + for _, group in self: objs.extend(group) return objs @@ -317,7 +317,7 @@ def template_complexity_reduction(idfname, epw, **kwargs): return BuildingTemplate.from_idf(idf, **kwargs) @classmethod - def open(cls, filename): # noqa: A003 + def open(cls, filename): """Initialize an UmiTemplate object from an UMI Template Library File. Args: diff --git a/archetypal/zone_graph.py b/archetypal/zone_graph.py index fa000436..cd4cdb70 100644 --- a/archetypal/zone_graph.py +++ b/archetypal/zone_graph.py @@ -312,7 +312,7 @@ def avg(zone: EpBunch): # Loop on the list of edges to get the x,y,z, coordinates of the # connected nodes # Those two points are the extrema of the line to be plotted - for i, j in enumerate(self.edges()): + for _, j in enumerate(self.edges()): x = np.array((pos[j[0]][0], pos[j[1]][0])) y = np.array((pos[j[0]][1], pos[j[1]][1])) z = np.array((pos[j[0]][2], pos[j[1]][2])) diff --git a/pyproject.toml b/pyproject.toml index b5ecd0be..ebb811f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ select = [ # flake8-bandit # "S", # flake8-bugbear - # "B", + "B", # flake8-builtins "A", # flake8-comprehensions @@ -133,7 +133,9 @@ ignore = [ # raise-vanilla-args "TRY003", # Checks for uses of isinstance and issubclass that take a tuple of types for comparison. 
- "UP038" + "UP038", + # Python builtin is shadowed by class attribute {name} from {row} + "A003" ] exclude = ["tests/input_data/*", "docker/trnsidf/*", "geomeppy"] diff --git a/tests/test_idfclass.py b/tests/test_idfclass.py index a76567f6..95118acd 100644 --- a/tests/test_idfclass.py +++ b/tests/test_idfclass.py @@ -9,6 +9,7 @@ InvalidEnergyPlusVersion, ) from archetypal.eplus_interface.version import EnergyPlusVersion +from archetypal.idfclass.idf import SimulationNotRunError from archetypal.utils import parallel_process from .conftest import data_dir @@ -347,7 +348,7 @@ def shoebox_res(self, config): def test_retrieve_meters_nosim(self, config, shoebox_res): shoebox_res.simulation_dir.rmtree_p() - with pytest.raises(Exception): + with pytest.raises(SimulationNotRunError): print(shoebox_res.meters) def test_retrieve_meters(self, config, shoebox_res): diff --git a/tests/test_umi.py b/tests/test_umi.py index 1305c920..3403d949 100644 --- a/tests/test_umi.py +++ b/tests/test_umi.py @@ -33,7 +33,7 @@ class TestUmiTemplate: """Test suite for the UmiTemplateLibrary class""" @pytest.fixture(scope="function") - def two_identical_libraries(self): + def two_identical_libraries(self, config): """Yield two identical libraries. Scope of this fixture is `function`.""" file = data_dir / "umi_samples/BostonTemplateLibrary_nodup.json" yield UmiTemplateLibrary.open(file), UmiTemplateLibrary.open(file) @@ -74,7 +74,7 @@ def test_unique_components(self, two_identical_libraries): # missing S. 
c.unique_components("OpaqueMaterial") - def test_graph(self): + def test_graph(self, config): """Test initialization of networkx DiGraph""" file = data_dir / "umi_samples/BostonTemplateLibrary_2.json" @@ -86,7 +86,7 @@ def test_graph(self): G = a.to_graph(include_orphans=True) assert len(G) > n_nodes - def test_template_to_template(self): + def test_template_to_template(self, config): """load the json into UmiTemplateLibrary object, then convert back to json and compare""" @@ -160,7 +160,7 @@ def read_json(file): return data_dict @pytest.fixture() - def idf(self): + def idf(self, config): yield IDF(prep_outputs=False) @pytest.fixture() diff --git a/tests/test_zonegraph.py b/tests/test_zonegraph.py index 407e8ffa..59fcaad0 100644 --- a/tests/test_zonegraph.py +++ b/tests/test_zonegraph.py @@ -29,7 +29,7 @@ def test_traverse_graph(self, small_office): assert G @pytest.fixture(scope="class") - def G(self, config, small_office): + def G(self, small_office): """ Args: config: @@ -40,7 +40,7 @@ def G(self, config, small_office): yield ZoneGraph.from_idf(idf) @pytest.mark.parametrize("adj_report", [True, False]) - def test_graph(self, config, small_office, adj_report): + def test_graph(self, small_office, adj_report): """Test the creation of a BuildingTemplate zone graph. 
Parametrize the creation of the adjacency report @@ -61,7 +61,7 @@ def test_graph(self, config, small_office, adj_report): EpBunch, ) - def test_graph_info(self, config, G): + def test_graph_info(self, G): """test the info method on a ZoneGraph Args: @@ -69,7 +69,7 @@ def test_graph_info(self, config, G): """ G.info() - def test_viewgraph2d(self, config, G): + def test_viewgraph2d(self, G): """test the visualization of the zonegraph in 2d Args: @@ -92,7 +92,7 @@ def test_viewgraph2d(self, config, G): ) @pytest.mark.parametrize("annotate", [True, "Name", ("core", None)]) - def test_viewgraph3d(self, config, G, annotate): + def test_viewgraph3d(self, G, annotate): """test the visualization of the zonegraph in 3d Args: @@ -106,7 +106,7 @@ def test_viewgraph3d(self, config, G, annotate): show=False, ) - def test_core_graph(self, config, G): + def test_core_graph(self, G): """ Args: G: @@ -116,7 +116,7 @@ def test_core_graph(self, config, G): assert len(H) == 1 # assert G has no nodes since Warehouse does not have a # core zone - def test_perim_graph(self, config, G): + def test_perim_graph(self, G): """ Args: G: