From c06abf1a3c8d1507d9b128a91b51babbca650ab9 Mon Sep 17 00:00:00 2001 From: "Haoyu (Daniel) Yang" Date: Sat, 31 Aug 2024 14:01:57 +0800 Subject: [PATCH] Fix `S101`, replace all `assert` in code base (except for tests) (#4017) * turn on S101 for non test code * replace some assert * fix S101 in phonon * fix bad replacement in util.coord * fix S101 in dev_scripts * fix alchemy apps.battery and command_line * fix analysis * fix entries * fix core * fix electronic_structure * fix io.abinit and io.aims * fix cif common and cp2k of io * fix io gaussian packmol exciting and feff * fix io.lammps * fix io.vasp * reapply ignore S101 in tests * print variable values in err messages * augment error message, thanks for the advice @janosh * replace use of assert * use type(x).__name__ in TypeError messages fix typo --------- Co-authored-by: Janosh Riebesell --- .../explicit_permutations_plane_algorithm.py | 6 +- .../get_plane_permutations_optimized.py | 7 +- dev_scripts/regen_libxcfunc.py | 6 +- dev_scripts/update_pt_data.py | 47 ++++++++------ pyproject.toml | 4 +- src/pymatgen/alchemy/transmuters.py | 12 ++-- src/pymatgen/analysis/diffraction/xrd.py | 3 +- src/pymatgen/analysis/elasticity/elastic.py | 3 +- src/pymatgen/analysis/phase_diagram.py | 6 +- .../substitution_probability.py | 3 +- .../structure_prediction/volume_predictor.py | 3 +- .../apps/battery/conversion_battery.py | 3 +- src/pymatgen/command_line/gulp_caller.py | 3 +- src/pymatgen/core/interface.py | 25 ++++--- src/pymatgen/core/operations.py | 3 +- src/pymatgen/core/periodic_table.py | 8 ++- src/pymatgen/core/tensors.py | 6 +- src/pymatgen/core/trajectory.py | 6 +- src/pymatgen/core/xcfunc.py | 6 +- .../electronic_structure/bandstructure.py | 3 +- src/pymatgen/electronic_structure/cohp.py | 42 ++++++++---- src/pymatgen/electronic_structure/dos.py | 21 ++++-- src/pymatgen/entries/compatibility.py | 14 ++-- src/pymatgen/io/abinit/abiobjects.py | 25 ++++--- src/pymatgen/io/abinit/abitimer.py | 15 +++-- src/pymatgen/io/abinit/inputs.py | 23 ++++--- src/pymatgen/io/abinit/netcdf.py | 3 +- src/pymatgen/io/abinit/pseudos.py | 9 ++- src/pymatgen/io/aims/inputs.py | 3 +- src/pymatgen/io/cif.py | 9 ++- src/pymatgen/io/common.py | 15 +++-- src/pymatgen/io/cp2k/inputs.py | 14 ++-- src/pymatgen/io/cp2k/sets.py | 10 ++- src/pymatgen/io/cp2k/utils.py | 6 +- src/pymatgen/io/exciting/inputs.py | 18 +++-- src/pymatgen/io/feff/inputs.py | 3 +- src/pymatgen/io/gaussian.py | 6 +- src/pymatgen/io/lammps/data.py | 65 +++++++++++-------- src/pymatgen/io/lammps/outputs.py | 3 +- src/pymatgen/io/lammps/utils.py | 18 +++-- src/pymatgen/io/lobster/lobsterenv.py | 53 +++++++++++---- src/pymatgen/io/lobster/outputs.py | 3 +- src/pymatgen/io/packmol.py | 3 +- src/pymatgen/io/vasp/outputs.py | 45 ++++++++----- src/pymatgen/io/vasp/sets.py | 33 ++++++---- src/pymatgen/phonon/bandstructure.py | 10 +-- src/pymatgen/phonon/gruneisen.py | 12 ++-- src/pymatgen/phonon/plotter.py | 22 ++++--- src/pymatgen/symmetry/groups.py | 3 +- .../advanced_transformations.py | 4 +- src/pymatgen/util/coord.py | 6 +- src/pymatgen/util/testing/__init__.py | 17 +++-- tests/analysis/diffraction/test_xrd.py | 2 +- tests/core/test_lattice.py | 2 +- tests/io/vasp/test_inputs.py | 2 +- 55 files changed, 448 insertions(+), 254 deletions(-) diff --git a/dev_scripts/chemenv/explicit_permutations_plane_algorithm.py b/dev_scripts/chemenv/explicit_permutations_plane_algorithm.py index bd71080c300..6f2ec7a5d89 100644 --- a/dev_scripts/chemenv/explicit_permutations_plane_algorithm.py +++ 
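# [Editor's illustrative sketch, not part of the patch] Nearly every hunk below follows the
# same recipe: ruff rule S101 flags bare `assert` in library code because asserts are removed
# when Python runs with the -O flag, so each assert becomes an explicit check that raises
# ValueError/TypeError/RuntimeError with the offending value interpolated into the message.
# Minimal before/after with a made-up function name:
def normalize_ratio(ratio):
    """Hypothetical helper; `ratio` must be a positive number."""
    # Before (silently skipped under `python -O`):
    #     assert ratio is not None and ratio > 0, "bad ratio"
    # After (always enforced, and the message carries the value):
    if ratio is None or ratio <= 0:
        raise ValueError(f"Expected a positive ratio, got {ratio=}")
    return float(ratio)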
b/dev_scripts/chemenv/explicit_permutations_plane_algorithm.py @@ -44,16 +44,14 @@ raise ValueError("Should all be separation plane") perms_on_file = f"Permutations on file in this algorithm ({len(sep_plane_algo._permutations)}) " - print(perms_on_file) - print(sep_plane_algo._permutations) + print(f"{perms_on_file}\n{sep_plane_algo._permutations}") permutations = sep_plane_algo.safe_separation_permutations( ordered_plane=sep_plane_algo.ordered_plane, ordered_point_groups=sep_plane_algo.ordered_point_groups ) sep_plane_algo._permutations = permutations - print(f"Test permutations ({len(permutations)}) :") - print(permutations) + print(f"Test permutations ({len(permutations)}):\n{permutations}") lgf = LocalGeometryFinder() lgf.setup_parameters(structure_refinement=lgf.STRUCTURE_REFINEMENT_NONE) diff --git a/dev_scripts/chemenv/get_plane_permutations_optimized.py b/dev_scripts/chemenv/get_plane_permutations_optimized.py index 1244d13e487..6a6588fe414 100644 --- a/dev_scripts/chemenv/get_plane_permutations_optimized.py +++ b/dev_scripts/chemenv/get_plane_permutations_optimized.py @@ -398,9 +398,10 @@ def random_permutations_iterator(initial_permutation, n_permutations): perms_used[some_perm] += 1 else: perms_used[some_perm] = 1 - tcurrent = time.process_time() - assert n_permutations is not None - time_left = (n_permutations - idx_perm) * (tcurrent - t0) / idx_perm + t_now = time.process_time() + if n_permutations is None: + raise ValueError(f"{n_permutations=}") + time_left = (n_permutations - idx_perm) * (t_now - t0) / idx_perm time_left = f"{time_left:.1f}" idx_perm += 1 print( diff --git a/dev_scripts/regen_libxcfunc.py b/dev_scripts/regen_libxcfunc.py index 98f9936a5c0..7524f460030 100755 --- a/dev_scripts/regen_libxcfunc.py +++ b/dev_scripts/regen_libxcfunc.py @@ -34,10 +34,12 @@ def parse_section(section): section += [line] else: num, entry = parse_section(section) - assert num not in dct + if num in dct: + raise RuntimeError(f"{num=} should not be present in {dct=}.") dct[num] = entry section = [] - assert section == [] + if section: + raise RuntimeError(f"Expected empty section, got {section=}") return dct diff --git a/dev_scripts/update_pt_data.py b/dev_scripts/update_pt_data.py index be1c9dbbf83..7673aabc12c 100644 --- a/dev_scripts/update_pt_data.py +++ b/dev_scripts/update_pt_data.py @@ -20,11 +20,11 @@ except ImportError: BeautifulSoup = None -ptable_yaml_path = "periodic_table.yaml" +PTABLE_YAML_PATH = "periodic_table.yaml" def parse_oxi_state(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) with open("oxidation_states.txt") as file: oxi_data = file.read() oxi_data = re.sub("[\n\r]", "", oxi_data) @@ -62,7 +62,7 @@ def parse_oxi_state(): def parse_ionic_radii(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) with open("ionic_radii.csv") as file: radii_data = file.read() radii_data = radii_data.split("\r") @@ -92,7 +92,7 @@ def parse_ionic_radii(): def parse_radii(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) with open("radii.csv") as file: radii_data = file.read() radii_data = radii_data.split("\r") @@ -128,7 +128,7 @@ def parse_radii(): def update_ionic_radii(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) for dct in data.values(): if "Ionic_radii" in dct: @@ -147,7 +147,7 @@ def update_ionic_radii(): def parse_shannon_radii(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) from openpyxl import load_workbook @@ -179,13 +179,13 @@ def parse_shannon_radii(): if el in 
data: data[el]["Shannon radii"] = dict(radii[el]) - dumpfn(data, ptable_yaml_path) + dumpfn(data, PTABLE_YAML_PATH) with open("../pymatgen/core/periodic_table.json", mode="w") as file: json.dump(data, file) def gen_periodic_table(): - data = loadfn(ptable_yaml_path) + data = loadfn(PTABLE_YAML_PATH) with open("../pymatgen/core/periodic_table.json", mode="w") as file: json.dump(data, file) @@ -226,7 +226,7 @@ def gen_iupac_ordering(): for el in periodic_table: periodic_table[el].pop("IUPAC ordering", None) - # now add iupac ordering + # now add IUPAC ordering for el in periodic_table: if "IUPAC ordering" in periodic_table[el]: # sanity check that we don't cover the same element twice @@ -253,23 +253,26 @@ def add_electron_affinities(): data += [row] data.pop(0) - ea = {} + element_electron_affinities = {} max_Z = max(Element(element).Z for element in Element.__members__) for r in data: # don't want superheavy elements or less common isotopes - if int(r[0]) > max_Z or r[2] in ea: + if int(r[0]) > max_Z or r[2] in element_electron_affinities: continue temp_str = re.sub(r"[\s\(\)]", "", r[3].strip("()[]")) # hyphen-like characters used that can't be parsed by .float bytes_rep = temp_str.encode("unicode_escape").replace(b"\\u2212", b"-") - ea[r[2]] = float(bytes_rep.decode("unicode_escape")) - - Z_set = {Element.from_name(element).Z for element in ea} - assert Z_set.issuperset(range(1, 93)) # Ensure that we have data for up to U. - print(ea) + element_electron_affinities[r[2]] = float(bytes_rep.decode("unicode_escape")) + + Z_set = {Element.from_name(element).Z for element in element_electron_affinities} + # Ensure that we have data for up to Uranium + if not Z_set.issuperset(range(1, 93)): + missing_electron_affinities = set(range(1, 93)) - Z_set + raise ValueError(f"{missing_electron_affinities=}") + print(element_electron_affinities) pt = loadfn("../pymatgen/core/periodic_table.json") for key, val in pt.items(): - val["Electron affinity"] = ea.get(Element(key).long_name) + val["Electron affinity"] = element_electron_affinities.get(Element(key).long_name) dumpfn(pt, "../pymatgen/core/periodic_table.json") @@ -284,15 +287,17 @@ def add_ionization_energies(): break data = defaultdict(list) for row in table.find_all("tr"): - row = [td.get_text().strip() for td in row.find_all("td")] - if row: + if row := [td.get_text().strip() for td in row.find_all("td")]: Z = int(row[0]) val = re.sub(r"\s", "", row[8].strip("()[]")) val = None if val == "" else float(val) data[Z] += [val] print(data) - print(data[51]) - assert set(data).issuperset(range(1, 93)) # Ensure that we have data for up to U. + + # Ensure that we have data for up to U. 
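# [Editor's sketch] The new messages above lean on the f-string "self-documenting expression"
# form `f"{name=}"` (Python 3.8+), which expands to the variable name, an equals sign, and its
# repr. Standalone toy version of the coverage check used in add_electron_affinities():
expected_z = set(range(1, 93))  # H through U
found_z = set(range(1, 93))     # complete in this toy case
if not found_z.issuperset(expected_z):
    missing_z = expected_z - found_z
    raise ValueError(f"{missing_z=}")  # renders like: missing_z={43, 61}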
+ if not set(data).issuperset(range(1, 93)): + raise RuntimeError("Failed to get data up to Uranium") + pt = loadfn("../pymatgen/core/periodic_table.json") for key, val in pt.items(): del val["Ionization energy"] diff --git a/pyproject.toml b/pyproject.toml index 3306d096ec4..7473f4198d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -220,7 +220,6 @@ ignore = [ "PLR2004", # Magic-value-comparison TODO: fix these "PLW2901", # Outer for loop variable overwritten by inner assignment target "PT013", # Incorrect import of pytest - "S101", # Use of "assert" TODO: fix these "S110", # Log for try-except-pass "S112", # Log for try-except-continue "S311", # Use random module for cryptographic purposes @@ -248,7 +247,8 @@ docstring-code-format = true "PLR0124", # comparison-with-itself "PLR2004", # magic-value-comparison "PLR6301", # no-self-use - ] + "S101", # Use of "assert" +] "src/pymatgen/analysis/*" = ["D"] "src/pymatgen/io/*" = ["D"] "dev_scripts/*" = ["D"] diff --git a/src/pymatgen/alchemy/transmuters.py b/src/pymatgen/alchemy/transmuters.py index 2365c1bba71..69049f53493 100644 --- a/src/pymatgen/alchemy/transmuters.py +++ b/src/pymatgen/alchemy/transmuters.py @@ -196,12 +196,12 @@ def append_transformed_structures(self, trafo_structs_or_transmuter): Args: trafo_structs_or_transmuter: A list of transformed structures or a transmuter. """ - if isinstance(trafo_structs_or_transmuter, self.__class__): - self.transformed_structures += trafo_structs_or_transmuter.transformed_structures - else: - for ts in trafo_structs_or_transmuter: - assert isinstance(ts, TransformedStructure) - self.transformed_structures += trafo_structs_or_transmuter + if not isinstance(trafo_structs_or_transmuter, self.__class__) and not all( + isinstance(ts, TransformedStructure) for ts in trafo_structs_or_transmuter + ): + raise TypeError("Some transformed structure has incorrect type.") + + self.transformed_structures += trafo_structs_or_transmuter @classmethod def from_structures(cls, structures, transformations=None, extend_collection=0) -> Self: diff --git a/src/pymatgen/analysis/diffraction/xrd.py b/src/pymatgen/analysis/diffraction/xrd.py index ce5666a81a8..d67cca826be 100644 --- a/src/pymatgen/analysis/diffraction/xrd.py +++ b/src/pymatgen/analysis/diffraction/xrd.py @@ -120,7 +120,8 @@ def __init__(self, wavelength="CuKa", symprec: float = 0, debye_waller_factors=N self.radiation = wavelength self.wavelength = WAVELENGTHS[wavelength] else: - raise TypeError(f"{type(wavelength)=} must be either float, int or str") + wavelength_type = type(wavelength).__name__ + raise TypeError(f"{wavelength_type=} must be either float, int or str") self.symprec = symprec self.debye_waller_factors = debye_waller_factors or {} diff --git a/src/pymatgen/analysis/elasticity/elastic.py b/src/pymatgen/analysis/elasticity/elastic.py index a7386120dd2..ed004da4405 100644 --- a/src/pymatgen/analysis/elasticity/elastic.py +++ b/src/pymatgen/analysis/elasticity/elastic.py @@ -82,7 +82,8 @@ def calculate_stress(self, strain): strain = np.array(strain) if strain.shape == (6,): strain = Strain.from_voigt(strain) - assert strain.shape == (3, 3), "Strain must be 3x3 or Voigt notation" + if strain.shape != (3, 3): + raise ValueError(f"Strain must be 3x3 or Voigt notation, got {strain.shape=}") stress_matrix = self.einsum_sequence([strain] * (self.order - 1)) / factorial(self.order - 1) return Stress(stress_matrix) diff --git a/src/pymatgen/analysis/phase_diagram.py b/src/pymatgen/analysis/phase_diagram.py index 117265614ad..9f09537fa69 100644 
--- a/src/pymatgen/analysis/phase_diagram.py +++ b/src/pymatgen/analysis/phase_diagram.py @@ -364,8 +364,10 @@ def __init__( computed_data = self._compute() else: computed_data = MontyDecoder().process_decoded(computed_data) - assert isinstance(computed_data, dict) - # update keys to be Element objects in case they are strings in pre-computed data + if not isinstance(computed_data, dict): + raise TypeError(f"computed_data should be dict, got {type(computed_data).__name__}") + + # Update keys to be Element objects in case they are strings in pre-computed data computed_data["el_refs"] = [(Element(el_str), entry) for el_str, entry in computed_data["el_refs"]] self.computed_data = computed_data self.facets = computed_data["facets"] diff --git a/src/pymatgen/analysis/structure_prediction/substitution_probability.py b/src/pymatgen/analysis/structure_prediction/substitution_probability.py index 045c0ba6be3..285fe86158b 100644 --- a/src/pymatgen/analysis/structure_prediction/substitution_probability.py +++ b/src/pymatgen/analysis/structure_prediction/substitution_probability.py @@ -150,7 +150,8 @@ def cond_prob_list(self, l1, l2): The conditional probability (assuming these species are in l2) """ - assert len(l1) == len(l2) + if len(l1) != len(l2): + raise ValueError("lengths of l1 and l2 mismatch.") p = 1 for s1, s2 in zip(l1, l2, strict=True): p *= self.cond_prob(s1, s2) diff --git a/src/pymatgen/analysis/structure_prediction/volume_predictor.py b/src/pymatgen/analysis/structure_prediction/volume_predictor.py index cc71c6235f1..bd3aaaedbab 100644 --- a/src/pymatgen/analysis/structure_prediction/volume_predictor.py +++ b/src/pymatgen/analysis/structure_prediction/volume_predictor.py @@ -204,7 +204,8 @@ def predict(self, structure: Structure, icsd_vol=False): if sp1 in bp_dict and sp2 in bp_dict: expected_dist = bp_dict[sp1] + bp_dict[sp2] else: - assert sp1.atomic_radius is not None + if sp1.atomic_radius is None: + raise ValueError("atomic_radius of sp1 is None.") expected_dist = sp1.atomic_radius + sp2.atomic_radius if not smallest_ratio or nn.nn_distance / expected_dist < smallest_ratio: diff --git a/src/pymatgen/apps/battery/conversion_battery.py b/src/pymatgen/apps/battery/conversion_battery.py index b84da63efd6..63d631e57cb 100644 --- a/src/pymatgen/apps/battery/conversion_battery.py +++ b/src/pymatgen/apps/battery/conversion_battery.py @@ -80,7 +80,8 @@ def from_composition_and_pd( if len(profile) < 2: return None - assert working_ion_entry is not None + if working_ion_entry is None: + raise ValueError("working_ion_entry is None.") working_ion_symbol = working_ion_entry.elements[0].symbol normalization_els = {el: amt for el, amt in comp.items() if el != Element(working_ion_symbol)} framework = comp.as_dict() diff --git a/src/pymatgen/command_line/gulp_caller.py b/src/pymatgen/command_line/gulp_caller.py index 0e1ae16fdd6..4d7bb3b17e0 100644 --- a/src/pymatgen/command_line/gulp_caller.py +++ b/src/pymatgen/command_line/gulp_caller.py @@ -799,7 +799,8 @@ def __init__(self, bush_lewis_flag): Args: bush_lewis_flag (str): Flag for using Bush or Lewis potential. 
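# [Editor's sketch] Several of the replaced asserts, e.g. in volume_predictor.py and
# conversion_battery.py above, existed mainly to narrow an Optional value for the type checker.
# An explicit `if x is None: raise ...` narrows the type for mypy just as well and, unlike
# assert, still runs under `python -O`. Toy example with invented names:
from __future__ import annotations

def expected_bond_length(radius_a: float | None, radius_b: float | None) -> float:
    """Hypothetical helper: sum of two atomic radii that may be missing."""
    if radius_a is None or radius_b is None:
        # after this raise, a type checker treats both values as plain float
        raise ValueError(f"Both radii are required, got {radius_a=}, {radius_b=}")
    return radius_a + radius_b

print(expected_bond_length(1.0, 0.5))  # 1.5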
""" - assert bush_lewis_flag in {"bush", "lewis"} + if bush_lewis_flag not in {"bush", "lewis"}: + raise ValueError(f"bush_lewis_flag should be bush or lewis, got {bush_lewis_flag}") pot_file = "bush.lib" if bush_lewis_flag == "bush" else "lewis.lib" with open(os.path.join(os.environ["GULP_LIB"], pot_file)) as file: # In lewis.lib there is no shell for cation diff --git a/src/pymatgen/core/interface.py b/src/pymatgen/core/interface.py index 107241385d8..7360889233c 100644 --- a/src/pymatgen/core/interface.py +++ b/src/pymatgen/core/interface.py @@ -997,7 +997,8 @@ def get_trans_mat( c2_a2_ratio = 1.0 if ratio is None else ratio[0] / ratio[1] metric = np.array([[1, 0, 0], [0, 1, 0], [0, 0, c2_a2_ratio]]) elif lat_type == "o": - assert ratio is not None, "Invalid ratio for orthorhombic system" + if ratio is None: + raise ValueError(f"Invalid {ratio=} for orthorhombic system") for idx in range(3): if ratio is not None and ratio[idx] is None: ratio[idx] = 1 @@ -1203,9 +1204,13 @@ def get_trans_mat( mv = 1 # Make sure mu, lambda, mv are coprime integers - assert mu is not None - assert lam is not None - assert mv is not None + if mu is None: + raise ValueError("mu is None.") + if lam is None: + raise ValueError("lambda is None.") + if mv is None: + raise ValueError("mv is None.") + if reduce(math.gcd, [mu, lam, mv]) != 1: temp = cast(int, reduce(math.gcd, [mu, lam, mv])) mu = round(mu / temp) @@ -1253,7 +1258,8 @@ def get_trans_mat( raise RuntimeError("Sigma >1000 too large. Are you sure what you are doing, Please check the GB if exist") # Transform surface, r_axis, r_matrix in terms of primitive lattice surface = np.matmul(surface, np.transpose(trans_cry)) - assert surface is not None + if surface is None: + raise ValueError("surface is None.") fractions = [Fraction(x).limit_denominator() for x in surface] least_mul = reduce(lcm, [fraction.denominator for fraction in fractions]) surface = cast(Tuple3Ints, tuple(round(x * least_mul) for x in surface)) @@ -1995,8 +2001,8 @@ def get_rotation_angle_from_sigma( lat_type = lat_type.lower() # Check r_axis length - if lat_type in {"c", "t"}: - assert len(r_axis) == 3, "r_axis length incompatible with selected lattice system" + if lat_type in {"c", "t"} and len(r_axis) != 3: + raise ValueError(f"expect r_axis length 3 for selected lattice system, got {len(r_axis)}") # Check lattice axial ratio length if lat_type == "o" and (ratio is None or len(ratio) != 3): @@ -2484,9 +2490,8 @@ def __init__( vacuum_over_film: vacuum space above the film in Angstroms. Defaults to 0. interface_properties: properties associated with the Interface. Defaults to None. 
""" - assert ( - "interface_label" in site_properties - ), "Must provide labeling of substrate and film sites in site properties" + if "interface_label" not in site_properties: + raise RuntimeError("Must provide labeling of substrate and film sites in site properties") self._in_plane_offset = np.array(in_plane_offset, dtype="float") self._gap = gap diff --git a/src/pymatgen/core/operations.py b/src/pymatgen/core/operations.py index 34d2d534eb4..63b13519c57 100644 --- a/src/pymatgen/core/operations.py +++ b/src/pymatgen/core/operations.py @@ -154,7 +154,8 @@ def transform_tensor(self, tensor: np.ndarray) -> np.ndarray: """ dim = tensor.shape rank = len(dim) - assert all(val == 3 for val in dim) + if any(val != 3 for val in dim): + raise ValueError("Some dimension in tensor is not 3.") # Build einstein sum string lc = string.ascii_lowercase diff --git a/src/pymatgen/core/periodic_table.py b/src/pymatgen/core/periodic_table.py index 440e7d527f6..63419d5f986 100644 --- a/src/pymatgen/core/periodic_table.py +++ b/src/pymatgen/core/periodic_table.py @@ -1318,7 +1318,8 @@ def get_shannon_radius( Shannon radius for specie in the specified environment. """ radii = self._el.data["Shannon radii"] - assert self._oxi_state is not None + if self._oxi_state is None: + raise ValueError("oxi_state is None.") radii = radii[str(int(self._oxi_state))][cn] if len(radii) == 1: key, data = next(iter(radii.items())) @@ -1358,7 +1359,8 @@ def get_crystal_field_spin( if len(elec) < 4 or elec[-2][1] != "s" or elec[-1][1] != "d": raise AttributeError(f"Invalid element {self.symbol} for crystal field calculation") - assert self.oxi_state is not None + if self.oxi_state is None: + raise ValueError("oxi_state is None.") n_electrons = elec[-1][2] + elec[-2][2] - self.oxi_state if n_electrons < 0 or n_electrons > 10: raise AttributeError(f"Invalid oxidation state {self.oxi_state} for element {self.symbol}") @@ -1634,7 +1636,7 @@ def get_el_sp(obj: int | SpeciesLike) -> Element | Species | DummySpecies: # If obj is an integer, return the Element with atomic number obj try: flt = float(obj) - assert flt == int(flt) + assert flt == int(flt) # noqa: S101 return Element.from_Z(int(flt)) except (AssertionError, ValueError, TypeError, KeyError): pass diff --git a/src/pymatgen/core/tensors.py b/src/pymatgen/core/tensors.py index 162d4229be4..0cef6c2a17f 100644 --- a/src/pymatgen/core/tensors.py +++ b/src/pymatgen/core/tensors.py @@ -577,7 +577,8 @@ def from_values_indices( obj = cls.from_voigt(base) if 6 in shape else cls(base) if populate: - assert structure, "Populate option must include structure input" + if not structure: + raise ValueError("Populate option must include structure input") obj = obj.populate(structure, vsym=vsym, verbose=verbose) elif structure: obj = obj.fit_to_structure(structure) @@ -648,7 +649,8 @@ def merge(old, new) -> None: merge(v, vtrans) guess = type(self).from_voigt(v) - assert guess.shape == self.shape, "Guess must have same shape" + if guess.shape != self.shape: + raise ValueError("Guess must have same shape") converged = False test_new, test_old = [guess.copy()] * 2 for idx in range(maxiter): diff --git a/src/pymatgen/core/trajectory.py b/src/pymatgen/core/trajectory.py index 77df6394bf2..f04cf854e4b 100644 --- a/src/pymatgen/core/trajectory.py +++ b/src/pymatgen/core/trajectory.py @@ -642,8 +642,10 @@ def _combine_site_props( return prop1 # General case - assert prop1 is None or isinstance(prop1, list | dict) - assert prop2 is None or isinstance(prop2, list | dict) + if prop1 is not None and not 
isinstance(prop1, list | dict): + raise ValueError(f"prop1 should be None, list or dict, got {type(prop1).__name__}.") + if prop2 is not None and not isinstance(prop2, list | dict): + raise ValueError(f"prop2 should be None, list or dict, got {type(prop2).__name__}.") p1_candidates: dict[str, Any] = { "NoneType": [None] * len1, diff --git a/src/pymatgen/core/xcfunc.py b/src/pymatgen/core/xcfunc.py index 0c9d6229199..85c3184f07b 100644 --- a/src/pymatgen/core/xcfunc.py +++ b/src/pymatgen/core/xcfunc.py @@ -181,8 +181,10 @@ def from_abinit_ixc(cls, ixc: int) -> Self | None: x, c = LibxcFunc(int(first)), LibxcFunc(int(last)) if not x.is_x_kind: x, c = c, x # Swap - assert x.is_x_kind - assert c.is_c_kind + if not x.is_x_kind: + raise ValueError("x is not x_kind") + if not c.is_c_kind: + raise ValueError("c is not c_kind") return cls(x=x, c=c) @classmethod diff --git a/src/pymatgen/electronic_structure/bandstructure.py b/src/pymatgen/electronic_structure/bandstructure.py index 43e8dfa3116..59f0e2d95ce 100644 --- a/src/pymatgen/electronic_structure/bandstructure.py +++ b/src/pymatgen/electronic_structure/bandstructure.py @@ -239,7 +239,8 @@ def get_projection_on_elements(self) -> dict[Spin, NDArray]: Spin.down: [][{Element: [values]}]} format. If there is no projections in the band structure, return {}. """ - assert self.structure is not None + if self.structure is None: + raise ValueError("structure is None.") result: dict[Spin, NDArray] = {} for spin, val in self.projections.items(): result[spin] = [[defaultdict(float) for _ in range(len(self.kpoints))] for _ in range(self.nb_bands)] diff --git a/src/pymatgen/electronic_structure/cohp.py b/src/pymatgen/electronic_structure/cohp.py index 687c474db38..acd10438821 100644 --- a/src/pymatgen/electronic_structure/cohp.py +++ b/src/pymatgen/electronic_structure/cohp.py @@ -384,10 +384,12 @@ def get_cohp_by_label( divided_cohp = self.all_cohps[label].get_cohp(spin=None, integrated=False) divided_icohp = self.all_cohps[label].get_icohp(spin=None) - assert divided_cohp is not None + if divided_cohp is None: + raise ValueError("divided_cohp is None") if summed_spin_channels and Spin.down in self.cohp: - assert divided_icohp is not None + if divided_icohp is None: + raise ValueError("divided_icohp is None") final_cohp: dict[Spin, Any] = {Spin.up: np.sum([divided_cohp[Spin.up], divided_cohp[Spin.down]], axis=0)} final_icohp: dict[Spin, Any] | None = { Spin.up: np.sum([divided_icohp[Spin.up], divided_icohp[Spin.down]], axis=0) @@ -424,12 +426,15 @@ def get_summed_cohp_by_label_list( # Check if COHPs are spin polarized first_cohpobject = self.get_cohp_by_label(label_list[0]) summed_cohp = first_cohpobject.cohp.copy() - assert first_cohpobject.icohp is not None + if first_cohpobject.icohp is None: + raise ValueError("icohp of first_cohpobject is None") summed_icohp = first_cohpobject.icohp.copy() for label in label_list[1:]: cohp = self.get_cohp_by_label(label) icohp = cohp.icohp - assert icohp is not None + if icohp is None: + raise ValueError("icohp is None") + summed_cohp[Spin.up] = np.sum([summed_cohp[Spin.up], cohp.cohp[Spin.up]], axis=0) if Spin.down in summed_cohp: @@ -487,15 +492,19 @@ def get_summed_cohp_by_label_and_orbital_list( # Check if COHPs are spin polarized first_cohpobject = self.get_orbital_resolved_cohp(label_list[0], orbital_list[0]) - assert first_cohpobject is not None - assert first_cohpobject.icohp is not None + if first_cohpobject is None: + raise ValueError("first_cohpobject is None") + if first_cohpobject.icohp is None: + raise 
ValueError("icohp of first_cohpobject is None") summed_cohp = first_cohpobject.cohp.copy() summed_icohp = first_cohpobject.icohp.copy() for idx, label in enumerate(label_list[1:], start=1): cohp = self.get_orbital_resolved_cohp(label, orbital_list[idx]) - assert cohp is not None - assert cohp.icohp is not None + if cohp is None: + raise ValueError("cohp is None.") + if cohp.icohp is None: + raise ValueError("icohp of cohp is None.") summed_cohp[Spin.up] = np.sum([summed_cohp[Spin.up], cohp.cohp.copy()[Spin.up]], axis=0) if Spin.down in summed_cohp: summed_cohp[Spin.down] = np.sum([summed_cohp[Spin.down], cohp.cohp.copy()[Spin.down]], axis=0) @@ -704,7 +713,8 @@ def from_dict(cls, dct: dict[str, Any]) -> Self: else: orb_cohp = {} - assert avg_cohp is not None + if avg_cohp is None: + raise ValueError("avg_cohp is None") return cls( structure, avg_cohp, @@ -1065,7 +1075,8 @@ def icohpvalue_orbital( if isinstance(orbitals, tuple | list): orbitals = f"{orbitals[0]}-{orbitals[1]}" - assert self._orbitals is not None + if self._orbitals is None: + raise ValueError("self._orbitals is None") return self._orbitals[orbitals]["icohp"][spin] @property @@ -1093,8 +1104,10 @@ def summed_orbital_icohp(self) -> dict[str, float]: Returns: dict[str, float]: "str(Orbital1)-str(Ortibal2)": ICOHP value in eV. """ + if self._orbitals is None: + raise ValueError("_orbitals attrib is None.") + orbital_icohp = {} - assert self._orbitals is not None for orb, item in self._orbitals.items(): orbital_icohp[orb] = ( item["icohp"][Spin.up] + item["icohp"][Spin.down] if self._is_spin_polarized else item["icohp"][Spin.up] @@ -1402,10 +1415,13 @@ def get_integrated_cohp_in_energy_range( icohps = cohp.all_cohps[label].get_icohp(spin=None) else: _icohps = cohp.get_orbital_resolved_cohp(label=label, orbitals=orbital) - assert _icohps is not None + if _icohps is None: + raise ValueError("_icohps is None") icohps = _icohps.icohp - assert icohps is not None + if icohps is None: + raise ValueError("ichops is None") + summedicohp = {} if summed_spin_channels and Spin.down in icohps: summedicohp[Spin.up] = icohps[Spin.up] + icohps[Spin.down] diff --git a/src/pymatgen/electronic_structure/dos.py b/src/pymatgen/electronic_structure/dos.py index 08aa86a7eed..8f9e2d779a5 100644 --- a/src/pymatgen/electronic_structure/dos.py +++ b/src/pymatgen/electronic_structure/dos.py @@ -323,7 +323,8 @@ def get_interpolated_gap( band gap, CBM and VBM. 
""" tdos = self.get_densities(spin) - assert tdos is not None + if tdos is None: + raise ValueError("tdos is None") if not abs_tol: tol = tol * tdos.sum() / tdos.shape[0] @@ -365,7 +366,8 @@ def get_cbm_vbm(self, tol: float = 1e-4, abs_tol: bool = False, spin: Spin | Non """ # Determine tolerance tdos = self.get_densities(spin) - assert tdos is not None + if tdos is None: + raise ValueError("tdos is None") if not abs_tol: tol = tol * tdos.sum() / tdos.shape[0] @@ -914,7 +916,8 @@ def get_band_filling( energies = dos.energies - dos.efermi dos_densities = dos.get_densities(spin=spin) - assert dos_densities is not None + if dos_densities is None: + raise ValueError("dos_densities is None") # Only integrate up to Fermi level energies = dos.energies - dos.efermi @@ -1101,7 +1104,8 @@ def get_n_moment( energies = dos.energies - dos.efermi dos_densities = dos.get_densities(spin=spin) - assert dos_densities is not None + if dos_densities is None: + raise ValueError("dos_densities is None") # Only consider a given energy range if erange: @@ -1196,7 +1200,8 @@ def get_upper_band_edge( energies = transformed_dos.energies - transformed_dos.efermi densities = transformed_dos.get_densities(spin=spin) - assert densities is not None + if densities is None: + raise ValueError("densities is None") # Only consider a given energy range, if specified if erange: @@ -1255,7 +1260,8 @@ def get_dos_fp( try: densities = pdos[fp_type] - assert densities is not None + if densities is None: + raise ValueError("densities is None") if len(energies) < n_bins: inds = np.where((energies >= min_e) & (energies <= max_e)) return DosFingerprint(energies[inds], densities[inds], fp_type, len(energies), np.diff(energies)[0]) @@ -1463,7 +1469,8 @@ def get_site_t2g_eg_resolved_dos( if s == site: for orb, pdos in atom_dos.items(): orbital = _get_orb_lobster(str(orb)) - assert orbital is not None + if orbital is None: + raise ValueError("orbital is None") if orbital in (Orbital.dxy, Orbital.dxz, Orbital.dyz): t2g_dos.append(pdos) diff --git a/src/pymatgen/entries/compatibility.py b/src/pymatgen/entries/compatibility.py index ea810b2ac1b..5cac2a02d95 100644 --- a/src/pymatgen/entries/compatibility.py +++ b/src/pymatgen/entries/compatibility.py @@ -68,10 +68,12 @@ "Te": (-2, -1), } -assert ( # ping @janosh @rkingsbury on GitHub if this fails +# Ping @janosh @rkingsbury on GitHub if this fails +if ( MP2020_COMPAT_CONFIG["Corrections"]["GGAUMixingCorrections"]["O"] - == MP2020_COMPAT_CONFIG["Corrections"]["GGAUMixingCorrections"]["F"] -), "MP2020Compatibility.yaml expected to have the same Hubbard U corrections for O and F" + != MP2020_COMPAT_CONFIG["Corrections"]["GGAUMixingCorrections"]["F"] +): + raise RuntimeError("MP2020Compatibility.yaml expected to have the same Hubbard U corrections for O and F") AnyComputedEntry: TypeAlias = ComputedEntry | ComputedStructureEntry @@ -826,7 +828,8 @@ def get_explanation_dict(self, entry: ComputedEntry) -> dict[str, Any]: else: uncer = uncer_dict.get(str(c), 0) - assert c.__doc__ is not None + if c.__doc__ is None: + raise RuntimeError("__doc__ of some correction is None.") cd = { "name": str(c), "description": c.__doc__.split("Args")[0].strip(), @@ -1452,7 +1455,8 @@ def get_adjustments(self, entry: ComputedEntry) -> list[EnergyAdjustment]: # if H2O and O2 energies have been set explicitly via kwargs, then # all H2 polymorphs will get the same energy. 
if rform == "H2": - assert self.h2_energy is not None, "H2 energy not set" + if self.h2_energy is None: + raise ValueError("H2 energy not set") adjustments.append( ConstantEnergyAdjustment( (self.fit_h2_energy - self.h2_energy) * comp.num_atoms, diff --git a/src/pymatgen/io/abinit/abiobjects.py b/src/pymatgen/io/abinit/abiobjects.py index bf4aa61a642..b1015f2aea8 100644 --- a/src/pymatgen/io/abinit/abiobjects.py +++ b/src/pymatgen/io/abinit/abiobjects.py @@ -280,8 +280,9 @@ def structure_to_abivars( def contract(string): """ - assert contract("1 1 1 2 2 3") == "3*1 2*2 1*3" - assert contract("1 1 3 2 3") == "2*1 1*3 1*2 1*3". + Examples: + assert contract("1 1 1 2 2 3") == "3*1 2*2 1*3" + assert contract("1 1 3 2 3") == "2*1 1*3 1*2 1*3". """ if not string: return string @@ -724,7 +725,8 @@ def __init__( abivars = {} if mode == KSamplingModes.monkhorst: - assert num_kpts == 0 + if num_kpts != 0: + raise ValueError(f"expect num_kpts to be zero, got {num_kpts}") ngkpt = np.reshape(kpts, 3) shiftk = np.reshape(kpt_shifts, (-1, 3)) @@ -1465,7 +1467,8 @@ def __init__( self.gwpara = gwpara if ppmodel is not None: - assert screening.use_hilbert is False + if screening.use_hilbert: + raise ValueError("cannot use hilbert for screening") self.ppmodel = PPModel.as_ppmodel(ppmodel) self.ecuteps = ecuteps if ecuteps is not None else screening.ecuteps @@ -1529,7 +1532,8 @@ def to_abivars(self): } # TODO: problem with the spin - # assert len(self.bdgw) == self.nkptgw + # if len(self.bdgw) != self.nkptgw: + # raise ValueError("lengths of bdgw and nkptgw mismatch") # ppmodel variables if self.use_ppmodel: @@ -1603,17 +1607,20 @@ def __init__( self.nband = nband self.mbpt_sciss = mbpt_sciss self.coulomb_mode = coulomb_mode - assert coulomb_mode in self._COULOMB_MODES + if coulomb_mode not in self._COULOMB_MODES: + raise ValueError("coulomb_mode not in _COULOMB_MODES") self.ecuteps = ecuteps self.mdf_epsinf = mdf_epsinf self.exc_type = exc_type - assert exc_type in self._EXC_TYPES + if exc_type not in self._EXC_TYPES: + raise ValueError("exc_type not in _EXC_TYPES") self.algo = algo - assert algo in self._ALGO2VAR + if algo not in self._ALGO2VAR: + raise ValueError(f"{algo=} not in {self._ALGO2VAR=}") self.with_lf = with_lf - # if bs_freq_mesh is not given, abinit will select its own mesh. + # If bs_freq_mesh is not given, abinit will select its own mesh. 
self.bs_freq_mesh = np.array(bs_freq_mesh) if bs_freq_mesh is not None else bs_freq_mesh self.zcut = zcut self.optdriver = 99 diff --git a/src/pymatgen/io/abinit/abitimer.py b/src/pymatgen/io/abinit/abitimer.py index 53d309ad545..a8c9757718f 100644 --- a/src/pymatgen/io/abinit/abitimer.py +++ b/src/pymatgen/io/abinit/abitimer.py @@ -293,7 +293,8 @@ def pefficiency(self): ctime_peff = n * [-1] wtime_peff = n * [-1] - assert sect_name not in peff + if sect_name in peff: + raise ValueError("sect_name should not be in peff") peff[sect_name] = {} peff[sect_name]["cpu_time"] = ctime_peff peff[sect_name]["wall_time"] = wtime_peff @@ -519,7 +520,8 @@ def _order_by_peff(self, key, criterion, reverse=True): values = peff[key][:] if len(values) > 1: ref_value = values.pop(self._ref_idx) - assert ref_value == 1.0 + if ref_value != 1.0: + raise ValueError(f"expect ref_value to be 1.0, got {ref_value}") data.append((sect_name, self.estimator(values))) @@ -661,7 +663,8 @@ def get_section(self, section_name): """Return section associated to `section_name`.""" idx = self.section_names.index(section_name) sect = self.sections[idx] - assert sect.name == section_name + if sect.name != section_name: + raise ValueError(f"{sect.name=} != {section_name=}") return sect def to_csv(self, fileobj=sys.stdout): @@ -734,7 +737,8 @@ def names_and_values(self, key, minval=None, minfract=None, sorted=True): # noq other_val = 0.0 if minval is not None: - assert minfract is None + if minfract is not None: + raise ValueError(f"minfract should be None, got {minfract}") for name, val in zip(names, values, strict=True): if val >= minval: @@ -747,7 +751,8 @@ def names_and_values(self, key, minval=None, minfract=None, sorted=True): # noq new_values.append(other_val) elif minfract is not None: - assert minval is None + if minval is not None: + raise ValueError(f"minval should be None, got {minval}") total = self.sum_sections(key) diff --git a/src/pymatgen/io/abinit/inputs.py b/src/pymatgen/io/abinit/inputs.py index d84a097b8ba..ca9d94bb307 100644 --- a/src/pymatgen/io/abinit/inputs.py +++ b/src/pymatgen/io/abinit/inputs.py @@ -1069,9 +1069,10 @@ def __init__(self, structure: Structure | Sequence[Structure], pseudos, pseudo_d raise ValueError(f"{ndtset=} cannot be <=0") if not isinstance(structure, list | tuple): - self._inputs = [BasicAbinitInput(structure=structure, pseudos=pseudos) for i in range(ndtset)] + self._inputs = [BasicAbinitInput(structure=structure, pseudos=pseudos) for _ in range(ndtset)] else: - assert len(structure) == ndtset + if len(structure) != ndtset: + raise ValueError("length of structure is not equal to ndtset") self._inputs = [BasicAbinitInput(structure=s, pseudos=pseudos) for s in structure] @classmethod @@ -1161,11 +1162,11 @@ def on_all(*args, **kwargs): def __add__(self, other): """Self + other.""" if isinstance(other, BasicAbinitInput): - new_mds = BasicMultiDataset.from_inputs(self) + new_mds = type(self).from_inputs(self) new_mds.append(other) return new_mds - if isinstance(other, BasicMultiDataset): - new_mds = BasicMultiDataset.from_inputs(self) + if isinstance(other, type(self)): + new_mds = type(self).from_inputs(self) new_mds.extend(other) return new_mds @@ -1173,24 +1174,26 @@ def __add__(self, other): def __radd__(self, other): if isinstance(other, BasicAbinitInput): - new_mds = BasicMultiDataset.from_inputs([other]) + new_mds = type(self).from_inputs([other]) new_mds.extend(self) - elif isinstance(other, BasicMultiDataset): - new_mds = BasicMultiDataset.from_inputs(other) + elif 
isinstance(other, type(self)): + new_mds = type(self).from_inputs(other) new_mds.extend(self) else: raise NotImplementedError("Operation not supported") def append(self, abinit_input): """Add a BasicAbinitInput to the list.""" - assert isinstance(abinit_input, BasicAbinitInput) + if not isinstance(abinit_input, BasicAbinitInput): + raise TypeError(f"abinit_input should be instance of BasicAbinitInput, got {type(abinit_input).__name__}") if any(p1 != p2 for p1, p2 in zip(abinit_input.pseudos, abinit_input.pseudos, strict=True)): raise ValueError("Pseudos must be consistent when from_inputs is invoked.") self._inputs.append(abinit_input) def extend(self, abinit_inputs): """Extends self with a list of BasicAbinitInputs.""" - assert all(isinstance(inp, BasicAbinitInput) for inp in abinit_inputs) + if any(not isinstance(inp, BasicAbinitInput) for inp in abinit_inputs): + raise TypeError("All obj in abinit_inputs should be instance of BasicAbinitInput") for inp in abinit_inputs: if any(p1 != p2 for p1, p2 in zip(self[0].pseudos, inp.pseudos, strict=True)): raise ValueError("Pseudos must be consistent when from_inputs is invoked.") diff --git a/src/pymatgen/io/abinit/netcdf.py b/src/pymatgen/io/abinit/netcdf.py index bd32140125a..c4a23faf367 100644 --- a/src/pymatgen/io/abinit/netcdf.py +++ b/src/pymatgen/io/abinit/netcdf.py @@ -183,7 +183,8 @@ def read_value(self, varname, path="/", cmode=None, default=NO_DEFAULT): except IndexError: return var.getValue() if not var.shape else var[:] - assert var.shape[-1] == 2 + if var.shape[-1] != 2: + raise ValueError(f"{var.shape[-1]=}, expect it to be 2") if cmode == "c": return var[..., 0] + 1j * var[..., 1] raise ValueError(f"Wrong value for {cmode=}") diff --git a/src/pymatgen/io/abinit/pseudos.py b/src/pymatgen/io/abinit/pseudos.py index 72b255d0b1b..fb0ecc28c31 100644 --- a/src/pymatgen/io/abinit/pseudos.py +++ b/src/pymatgen/io/abinit/pseudos.py @@ -1220,7 +1220,8 @@ def __init__(self, filepath): self.valence_states: dict = {} for node in root.find("valence_states"): attrib = AttrDict(node.attrib) - assert attrib.id not in self.valence_states + if attrib.id in self.valence_states: + raise ValueError(f"{attrib.id=} should not be in {self.valence_states=}") self.valence_states[attrib.id] = attrib # Parse the radial grids @@ -1228,7 +1229,8 @@ def __init__(self, filepath): for node in root.findall("radial_grid"): grid_params = node.attrib gid = grid_params["id"] - assert gid not in self.rad_grids + if gid in self.rad_grids: + raise ValueError(f"{gid=} should not be in {self.rad_grids=}") self.rad_grids[gid] = self._eval_grid(grid_params) @@ -1597,7 +1599,8 @@ def __init__(self, pseudos: Sequence[Pseudo]) -> None: def __getitem__(self, Z): """Retrieve pseudos for the atomic number z. 
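# [Editor's sketch] Besides dropping asserts, __add__/__radd__ above now construct results with
# type(self).from_inputs(...) rather than naming BasicMultiDataset directly, so a subclass keeps
# its own type under `+`. Generic illustration with toy classes (not pymatgen API):
class Collection:
    def __init__(self, items):
        self.items = list(items)

    @classmethod
    def from_items(cls, items):
        return cls(items)

    def __add__(self, other):
        if not isinstance(other, Collection):
            return NotImplemented
        # type(self) keeps subclasses intact; hard-coding Collection here would not
        return type(self).from_items(self.items + other.items)

class TaggedCollection(Collection):
    pass

combined = TaggedCollection([1]) + Collection([2])
print(type(combined).__name__, combined.items)  # TaggedCollection [1, 2]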
Accepts both int and slice objects.""" if isinstance(Z, slice): - assert Z.stop is not None + if Z.stop is None: + raise ValueError("Z.stop is None") pseudos = [] for znum in iterator_from_slice(Z): pseudos.extend(self._pseudos_with_z[znum]) diff --git a/src/pymatgen/io/aims/inputs.py b/src/pymatgen/io/aims/inputs.py index 344cc425bc0..7aac74cf9b1 100644 --- a/src/pymatgen/io/aims/inputs.py +++ b/src/pymatgen/io/aims/inputs.py @@ -522,7 +522,8 @@ def get_content( content += f"# {param}:{val}\n" content += f"{lim}\n" - assert ("smearing" in parameters and "occupation_type" in parameters) is False + if "smearing" in parameters and "occupation_type" in parameters: + raise ValueError(f'both "smearing" and "occupation_type" in {parameters=}') for key, value in parameters.items(): if key in ["species_dir", "plus_u"]: diff --git a/src/pymatgen/io/cif.py b/src/pymatgen/io/cif.py index 46f820497f9..12950174b8f 100644 --- a/src/pymatgen/io/cif.py +++ b/src/pymatgen/io/cif.py @@ -226,7 +226,8 @@ def from_str(cls, string: str) -> Self: items.append("".join(deq.popleft())) n = len(items) // len(columns) - assert len(items) % n == 0 + if len(items) % n != 0: + raise ValueError(f"{len(items)=} is not a multiple of {n=}") loops.append(columns) for k, v in zip(columns * n, items, strict=True): data[k].append(v.strip()) @@ -1157,7 +1158,8 @@ def get_matching_coord( if all_species and len(all_species) == len(all_coords) and len(all_species) == len(all_magmoms): site_properties: dict[str, list] = {} if any(all_hydrogens): - assert len(all_hydrogens) == len(all_coords) + if len(all_hydrogens) != len(all_coords): + raise ValueError("lengths of all_hydrogens and all_coords mismatch") site_properties["implicit_hydrogens"] = all_hydrogens if self.feature_flags["magcif"]: @@ -1167,7 +1169,8 @@ def get_matching_coord( site_properties = {} if any(all_labels): - assert len(all_labels) == len(all_species) + if len(all_labels) != len(all_species): + raise ValueError("lengths of all_labels and all_species mismatch") else: all_labels = None # type: ignore[assignment] diff --git a/src/pymatgen/io/common.py b/src/pymatgen/io/common.py index 00a0cac7932..85fb4a0a03a 100644 --- a/src/pymatgen/io/common.py +++ b/src/pymatgen/io/common.py @@ -166,7 +166,7 @@ def value_at(self, x, y, z): z (float): Fraction of lattice vector c. Returns: - Value from self.data (potentially interpolated) correspondisng to + Value from self.data (potentially interpolated) corresponding to the point (x, y, z). """ return self.interpolator([x, y, z])[0] @@ -184,10 +184,15 @@ def linear_slice(self, p1, p2, n=100): List of n data points (mostly interpolated) representing a linear slice of the data from point p1 to point p2. 
""" - assert type(p1) in [list, np.ndarray] - assert type(p2) in [list, np.ndarray] - assert len(p1) == 3 - assert len(p2) == 3 + if type(p1) not in {list, np.ndarray}: + raise TypeError(f"type of p1 should be list or np.ndarray, got {type(p1).__name__}") + if len(p1) != 3: + raise ValueError(f"length of p1 should be 3, got {len(p1)}") + if type(p2) not in {list, np.ndarray}: + raise TypeError(f"type of p2 should be list or np.ndarray, got {type(p2).__name__}") + if len(p2) != 3: + raise ValueError(f"length of p2 should be 3, got {len(p2)}") + xpts = np.linspace(p1[0], p2[0], num=n) ypts = np.linspace(p1[1], p2[1], num=n) zpts = np.linspace(p1[2], p2[2], num=n) diff --git a/src/pymatgen/io/cp2k/inputs.py b/src/pymatgen/io/cp2k/inputs.py index be55755ad51..7322cb82da2 100644 --- a/src/pymatgen/io/cp2k/inputs.py +++ b/src/pymatgen/io/cp2k/inputs.py @@ -194,7 +194,8 @@ def __init__(self, keywords: Sequence[Keyword]): Args: keywords: A list of keywords. Must all have the same name (case-insensitive) """ - assert all(k.name.upper() == keywords[0].name.upper() for k in keywords) if keywords else True + if keywords and any(k.name.upper() != keywords[0].name.upper() for k in keywords): + raise ValueError("some keyword is invalid") self.name = keywords[0].name if keywords else None self.keywords = list(keywords) @@ -500,9 +501,10 @@ def inc(self, dct: dict): else: raise TypeError("Can only add sections or keywords.") - def insert(self, d): + def insert(self, d: Section | SectionList) -> None: """Insert a new section as a subsection of the current one.""" - assert isinstance(d, Section | SectionList) + if not isinstance(d, Section | SectionList): + raise TypeError(f"type of d should be Section or SectionList, got {type(d).__name__}") self.subsections[d.alias or d.name] = copy.deepcopy(d) def check(self, path: str): @@ -600,7 +602,8 @@ def __init__(self, sections: Sequence[Section]): Args: sections: A list of keywords. 
Must all have the same name (case-insensitive) """ - assert all(k.name.upper() == sections[0].name.upper() for k in sections) if sections else True + if sections and any(k.name.upper() != sections[0].name.upper() for k in sections): + raise ValueError("some section name is invalid") self.name = sections[0].name if sections else None self.alias = sections[0].alias if sections else None self.sections = list(sections) @@ -1954,7 +1957,8 @@ def __init__( self.kpts = kpts self.weights = weights or [1] * len(kpts) - assert len(self.kpts) == len(self.weights) + if len(self.kpts) != len(self.weights): + raise ValueError(f"lengths of kpts {len(self.kpts)} and weights {len(self.weights)} mismatch") self.eps_geo = eps_geo self.full_grid = full_grid self.parallel_group_size = parallel_group_size diff --git a/src/pymatgen/io/cp2k/sets.py b/src/pymatgen/io/cp2k/sets.py index e282f2a233a..39157e1303d 100644 --- a/src/pymatgen/io/cp2k/sets.py +++ b/src/pymatgen/io/cp2k/sets.py @@ -22,6 +22,7 @@ import itertools import os import warnings +from typing import TYPE_CHECKING import numpy as np from ruamel.yaml import YAML @@ -68,6 +69,9 @@ from pymatgen.io.vasp.inputs import Kpoints as VaspKpoints from pymatgen.io.vasp.inputs import KpointsSupportedModes +if TYPE_CHECKING: + from typing import Literal + __author__ = "Nicholas Winner" __version__ = "2.0" __email__ = "nwinner@berkeley.edu" @@ -1280,7 +1284,7 @@ def create_subsys(self, structure: Structure | Molecule) -> None: subsys.insert(coord) self["FORCE_EVAL"].insert(subsys) - def modify_dft_print_iters(self, iters, add_last="no"): + def modify_dft_print_iters(self, iters, add_last: Literal["no", "numeric", "symbolic"] = "no"): """ Modify all DFT print iterations at once. Common use is to set iters to the max number of iterations + 1 and then set add_last to numeric. 
This would have the @@ -1295,7 +1299,9 @@ def modify_dft_print_iters(self, iters, add_last="no"): symbolic: mark last iteration with the letter "l" no: do not explicitly include the last iteration """ - assert add_last.lower() in ["no", "numeric", "symbolic"] + if add_last.lower() not in {"no", "numeric", "symbolic"}: + raise ValueError(f"add_list should be no/numeric/symbolic, got {add_last.lower()}") + run_type = self["global"].get("run_type", Keyword("run_type", "energy")).values[0].upper() if run_type not in ["ENERGY_FORCE", "ENERGY", "WAVEFUNCTION_OPTIMIZATION", "WFN_OPT"] and self.check( "FORCE_EVAL/DFT/PRINT" diff --git a/src/pymatgen/io/cp2k/utils.py b/src/pymatgen/io/cp2k/utils.py index 84373f58ece..fd2b9e8d959 100644 --- a/src/pymatgen/io/cp2k/utils.py +++ b/src/pymatgen/io/cp2k/utils.py @@ -76,7 +76,8 @@ def preprocessor(data: str, dir: str = ".") -> str: # noqa: A002 includes = re.findall(r"(@include.+)", data, re.IGNORECASE) for incl in includes: inc = incl.split() - assert len(inc) == 2 # @include filename + if len(inc) != 2: # @include filename + raise ValueError(f"length of inc should be 2, got {len(inc)}") inc = inc[1].strip("'") inc = inc.strip('"') with zopen(os.path.join(dir, inc)) as file: @@ -84,7 +85,8 @@ def preprocessor(data: str, dir: str = ".") -> str: # noqa: A002 variable_sets = re.findall(r"(@SET.+)", data, re.IGNORECASE) for match in variable_sets: v = match.split() - assert len(v) == 3 # @SET VAR value + if len(v) != 3: # @SET VAR value + raise ValueError(f"length of v should be 3, got {len(v)}") var, value = v[1:] data = re.sub(rf"{match}", "", data) data = re.sub(rf"\${{?{var}}}?", value, data) diff --git a/src/pymatgen/io/exciting/inputs.py b/src/pymatgen/io/exciting/inputs.py index 13f280c8fa0..2d55256665e 100644 --- a/src/pymatgen/io/exciting/inputs.py +++ b/src/pymatgen/io/exciting/inputs.py @@ -85,12 +85,14 @@ def from_str(cls, data: str) -> Self: lockxyz = [] # get title _title = root.find("title") - assert _title is not None, "title cannot be None." + if _title is None: + raise ValueError("title cannot be None.") title_in = str(_title.text) # Read elements and coordinates for nodes in species_node: _speciesfile = nodes.get("speciesfile") - assert _speciesfile is not None, "speciesfile cannot be None." + if _speciesfile is None: + raise ValueError("speciesfile cannot be None.") symbol = _speciesfile.split(".")[0] if len(symbol.split("_")) == 2: symbol = symbol.split("_")[0] @@ -102,7 +104,8 @@ def from_str(cls, data: str) -> Self: for atom in nodes.iter("atom"): _coord = atom.get("coord") - assert _coord is not None, "coordinate cannot be None." + if _coord is None: + raise ValueError("coordinate cannot be None.") x, y, z = _coord.split() positions.append([float(x), float(y), float(z)]) elements.append(element) @@ -113,7 +116,8 @@ def from_str(cls, data: str) -> Self: lxyz = [] _lockxyz = atom.get("lockxyz") - assert _lockxyz is not None, "lockxyz cannot be None." + if _lockxyz is None: + raise ValueError("lockxyz cannot be None.") for line in _lockxyz.split(): if line in ("True", "true"): lxyz.append(True) @@ -129,7 +133,8 @@ def from_str(cls, data: str) -> Self: p[j] = p[j] * ExcitingInput.bohr2ang _crystal = struct.find("crystal") - assert _crystal is not None, "crystal cannot be None." 
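# [Editor's sketch] modify_dft_print_iters() above pairs a typing.Literal annotation (static
# check) with an explicit membership test (runtime check), since annotations alone are never
# enforced at runtime. Standalone version of the same idea:
from typing import Literal

def set_add_last(mode: Literal["no", "numeric", "symbolic"] = "no") -> str:
    if mode not in {"no", "numeric", "symbolic"}:
        raise ValueError(f"mode should be no/numeric/symbolic, got {mode!r}")
    return mode

print(set_add_last("numeric"))  # numeric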
+ if _crystal is None: + raise ValueError("crystal cannot be None.") # get the scale attribute scale_in = _crystal.get("scale") @@ -142,7 +147,8 @@ def from_str(cls, data: str) -> Self: # get basis vectors and scale them accordingly basisnode = _crystal.iter("basevect") for vect in basisnode: - assert vect.text is not None, "vectors cannot be None." + if vect.text is None: + raise ValueError("vect.text cannot be None.") x, y, z = vect.text.split() vectors.append( [ diff --git a/src/pymatgen/io/feff/inputs.py b/src/pymatgen/io/feff/inputs.py index 38cb10b5d23..8aad3bc6034 100644 --- a/src/pymatgen/io/feff/inputs.py +++ b/src/pymatgen/io/feff/inputs.py @@ -928,7 +928,8 @@ def __init__(self, atoms, paths, degeneracies=None): self.atoms = atoms self.paths = paths self.degeneracies = degeneracies or [1] * len(paths) - assert len(self.degeneracies) == len(self.paths) + if len(self.degeneracies) != len(self.paths): + raise ValueError(f"{len(self.degeneracies)=} and {len(self.paths)=} mismatch") def __str__(self): lines = ["PATH", "---------------"] diff --git a/src/pymatgen/io/gaussian.py b/src/pymatgen/io/gaussian.py index 4b4df3fe5bb..5b9e2c528b2 100644 --- a/src/pymatgen/io/gaussian.py +++ b/src/pymatgen/io/gaussian.py @@ -294,7 +294,8 @@ def from_str(cls, contents: str) -> Self: for line in lines: if link0_patt.match(line): match = link0_patt.match(line) - assert match is not None + if match is None: + raise ValueError("no match found") link0_dict[match[1].strip("=")] = match[2] route_patt = re.compile(r"^#[sSpPnN]*.*") @@ -313,7 +314,8 @@ def from_str(cls, contents: str) -> Self: functional, basis_set, route_paras, dieze_tag = read_route_line(route) ind = 2 title = [] - assert route_index is not None, "route_index cannot be None" + if route_index is None: + raise ValueError("route_index cannot be None") while lines[route_index + ind].strip(): title.append(lines[route_index + ind].strip()) ind += 1 diff --git a/src/pymatgen/io/lammps/data.py b/src/pymatgen/io/lammps/data.py index 3b807b598e0..a3580e98aa4 100644 --- a/src/pymatgen/io/lammps/data.py +++ b/src/pymatgen/io/lammps/data.py @@ -126,14 +126,16 @@ def __init__(self, bounds: Sequence, tilt: Sequence | None = None) -> None: orthogonal box. """ bounds_arr = np.array(bounds) - assert bounds_arr.shape == (3, 2), f"Expecting a (3, 2) array for bounds, got {bounds_arr.shape}" + if bounds_arr.shape != (3, 2): + raise ValueError(f"Expecting a (3, 2) array for bounds, got {bounds_arr.shape}") self.bounds = bounds_arr.tolist() matrix = np.diag(bounds_arr[:, 1] - bounds_arr[:, 0]) self.tilt = None if tilt is not None: tilt_arr = np.array(tilt) - assert tilt_arr.shape == (3,), f"Expecting a (3,) array for box_tilt, got {tilt_arr.shape}" + if tilt_arr.shape != (3,): + raise ValueError(f"Expecting a (3,) array for box_tilt, got {tilt_arr.shape}") self.tilt = tilt_arr.tolist() matrix[1, 0] = tilt_arr[0] matrix[2, 0] = tilt_arr[1] @@ -258,8 +260,8 @@ class 2 force field are valid keys, and each value is a keys, and each value is a DataFrame. atom_style (str): Output atom_style. Default to "full". 
""" - if velocities is not None: - assert len(velocities) == len(atoms), "Inconsistency found between atoms and velocities" + if velocities is not None and len(atoms) != len(velocities): + raise ValueError(f"{len(atoms)=} and {len(velocities)=} mismatch") if force_field: all_ff_kws = SECTION_KEYWORDS["ff"] + SECTION_KEYWORDS["class2"] @@ -529,7 +531,8 @@ def disassemble( if atom_labels is None: # add unique labels based on elements for el, vc in masses["element"].value_counts().items(): masses.loc[masses["element"] == el, "label"] = [f"{el}{c}" for c in range(1, vc + 1)] - assert masses["label"].nunique(dropna=False) == len(masses), "Expecting unique atom label for each type" + if masses["label"].nunique(dropna=False) != len(masses): + raise ValueError("Expecting unique atom label for each type") mass_info = [(row.label, row.mass) for row in masses.itertuples()] non_bond_coeffs: list = [] @@ -568,9 +571,10 @@ def label_topo(t) -> tuple: topo_idx = topo[0] - 1 indices = list(topo[1:]) mids = atoms_df.loc[indices]["molecule-ID"].unique() - assert ( - len(mids) == 1 - ), "Do not support intermolecular topology formed by atoms with different molecule-IDs" + if len(mids) != 1: + raise RuntimeError( + "Do not support intermolecular topology formed by atoms with different molecule-IDs" + ) label = label_topo(indices) topo_coeffs[ff_kw][topo_idx]["types"].append(label) if data_by_mols[mids[0]].get(key): @@ -711,15 +715,18 @@ def parse_section(sec_lines) -> tuple[str, pd.DataFrame]: body[name] = df_section err_msg += "Nos. of {} do not match between header and {} section" - assert len(body["Masses"]) == header["types"]["atom"], err_msg.format("atom types", "Masses") + if len(body["Masses"]) != header["types"]["atom"]: + raise RuntimeError(err_msg.format("atom types", "Masses")) atom_sections = ["Atoms", "Velocities"] if "Velocities" in body else ["Atoms"] for atom_sec in atom_sections: - assert len(body[atom_sec]) == header["counts"]["atoms"], err_msg.format("atoms", atom_sec) + if len(body[atom_sec]) != header["counts"]["atoms"]: + raise RuntimeError(err_msg.format("atoms", atom_sec)) for atom_sec in SECTION_KEYWORDS["topology"]: - if header["counts"].get(atom_sec.lower(), 0) > 0: - assert len(body[atom_sec]) == header["counts"][atom_sec.lower()], err_msg.format( - atom_sec.lower(), atom_sec - ) + if ( + header["counts"].get(atom_sec.lower(), 0) > 0 + and len(body[atom_sec]) != header["counts"][atom_sec.lower()] + ): + raise RuntimeError(err_msg.format(atom_sec.lower(), atom_sec)) items = {k.lower(): body[k] for k in ["Masses", "Atoms"]} items["velocities"] = body.get("Velocities") @@ -750,7 +757,8 @@ def from_ff_and_topologies( atom_style (str): Output atom_style. Default to "full". 
""" atom_types = set.union(*(t.species for t in topologies)) - assert atom_types.issubset(ff.maps["Atoms"]), "Unknown atom type found in topologies" + if not atom_types.issubset(ff.maps["Atoms"]): + raise ValueError("Unknown atom type found in topologies") items = {"box": box, "atom_style": atom_style, "masses": ff.masses, "force_field": ff.force_field} @@ -926,14 +934,13 @@ def __init__( # validate shape if charges is not None: charge_arr = np.array(charges) - assert charge_arr.shape == (len(sites),), "Wrong format for charges" + if charge_arr.shape != (len(sites),): + raise ValueError(f"{charge_arr.shape=} and {(len(sites), )=} mismatch") charges = charge_arr.tolist() if velocities is not None: velocities_arr = np.array(velocities) - assert velocities_arr.shape == ( - len(sites), - 3, - ), "Wrong format for velocities" + if velocities_arr.shape != (len(sites), 3): + raise ValueError(f"{velocities_arr.shape=} and {(len(sites), 3)=} mismatch") velocities = velocities_arr.tolist() if topologies: @@ -1108,7 +1115,8 @@ def map_mass(v): def _process_nonbond(self) -> dict: pair_df = pd.DataFrame(self.nonbond_coeffs) - assert self._is_valid(pair_df), "Invalid nonbond coefficients with rows varying in length" + if not self._is_valid(pair_df): + raise ValueError("Invalid nonbond coefficients with rows varying in length") n_pair, n_coeff = pair_df.shape pair_df.columns = [f"coeff{i}" for i in range(1, n_coeff + 1)] n_mass = len(self.mass_info) @@ -1145,9 +1153,11 @@ def find_eq_types(label, section) -> list: distinct_types = [set(itertools.chain(*(find_eq_types(t, kw) for t in dt))) for dt in distinct_types] type_counts = sum(len(dt) for dt in distinct_types) type_union = set.union(*distinct_types) - assert len(type_union) == type_counts, f"Duplicated items found under different coefficients in {kw}" + if len(type_union) != type_counts: + raise ValueError(f"Duplicated items found under different coefficients in {kw}") atoms = set(np.ravel(list(itertools.chain(*distinct_types)))) - assert atoms.issubset(self.maps["Atoms"]), f"Undefined atom type found in {kw}" + if not atoms.issubset(self.maps["Atoms"]): + raise ValueError(f"Undefined atom type found in {kw}") mapper = {} for i, dt in enumerate(distinct_types, start=1): for t in dt: @@ -1155,7 +1165,8 @@ def find_eq_types(label, section) -> list: def process_data(data) -> pd.DataFrame: df_coeffs = pd.DataFrame(data) - assert self._is_valid(df_coeffs), "Invalid coefficients with rows varying in length" + if not self._is_valid(df_coeffs): + raise ValueError("Invalid coefficients with rows varying in length") n, c = df_coeffs.shape df_coeffs.columns = [f"coeff{i}" for i in range(1, c + 1)] df_coeffs.index = range(1, n + 1) @@ -1281,11 +1292,13 @@ def __init__( type_count += len(mol.masses) mol_count += self.nums[idx] * mols_in_data self.atoms.index += 1 - assert len(self.atoms) == len(self._coordinates), "Wrong number of coordinates" + if len(self.atoms) != len(self._coordinates): + raise ValueError(f"{len(self.atoms)=} and {len(self._coordinates)=} mismatch") self.atoms.update(self._coordinates) self.velocities = None - assert self.mols[0].velocities is None, "Velocities not supported" + if self.mols[0].velocities is not None: + raise RuntimeError("Velocities not supported") self.topology = {} atom_count = 0 diff --git a/src/pymatgen/io/lammps/outputs.py b/src/pymatgen/io/lammps/outputs.py index d8163dcf958..0bccb66d4c4 100644 --- a/src/pymatgen/io/lammps/outputs.py +++ b/src/pymatgen/io/lammps/outputs.py @@ -170,7 +170,8 @@ def _parse_thermo(lines: 
list[str]) -> pd.DataFrame:
     for ts in time_steps:
         data = {}
         step = re.match(multi_pattern, ts[0])
-        assert step is not None
+        if step is None:
+            raise ValueError("step is None")
         data["Step"] = int(step[1])
         data |= {k: float(v) for k, v in re.findall(kv_pattern, "".join(ts[1:]))}
         dicts.append(data)
diff --git a/src/pymatgen/io/lammps/utils.py b/src/pymatgen/io/lammps/utils.py
index 5e940bac148..43d4ae34e7f 100644
--- a/src/pymatgen/io/lammps/utils.py
+++ b/src/pymatgen/io/lammps/utils.py
@@ -380,13 +380,18 @@ def convert_obatoms_to_molecule(
 
             ref = self.map_residue_to_mol[residue_name].copy()
 
-            # sanity check
-            assert len(mol) == len(ref)
-            assert ref.formula == mol.formula
+            # Sanity check
+            if len(mol) != len(ref):
+                raise ValueError(f"lengths of mol {len(mol)} and ref {len(ref)} do not match")
+            if ref.formula != mol.formula:
+                raise ValueError("formulas of ref and mol do not match")
 
-            # the packed molecules have the atoms in the same order..sigh!
+            # The packed molecules have the atoms in the same order..sigh!
             for idx, site in enumerate(mol):
-                assert site.specie.symbol == ref[idx].specie.symbol
+                if site.specie.symbol != ref[idx].specie.symbol:
+                    raise ValueError(
+                        f"symbols of site species {site.specie.symbol} and ref {ref[idx].specie.symbol} do not match"
+                    )
                 props.append(getattr(ref[idx], site_property))
 
         mol.add_site_property(site_property, props)
@@ -411,7 +416,8 @@ def restore_site_properties(self, site_property: str = "ff_map", filename: str |
 
         bma = BabelMolAdaptor.from_file(filename, "pdb")
         pbm = pybel.Molecule(bma._ob_mol)
-        assert len(pbm.residues) == sum(param["number"] for param in self.param_list)
+        if len(pbm.residues) != sum(param["number"] for param in self.param_list):
+            raise ValueError(f"length of pbm.residues {len(pbm.residues)} and total number in param_list do not match")
 
         packed_mol = self.convert_obatoms_to_molecule(
             pbm.residues[0].atoms,
diff --git a/src/pymatgen/io/lobster/lobsterenv.py b/src/pymatgen/io/lobster/lobsterenv.py
index 03bc7957ee7..9244e11ad9f 100644
--- a/src/pymatgen/io/lobster/lobsterenv.py
+++ b/src/pymatgen/io/lobster/lobsterenv.py
@@ -348,7 +348,9 @@ def get_light_structure_environment(
             valences=self.valences,
         )
 
-        assert self.valences is not None
+        if self.valences is None:
+            raise ValueError(f"{self.valences=}")
+
         for idx, val in enumerate(self.valences):
             if val >= 0.0:
                 new_list_ce_symbols.append(list_ce_symbols[idx])
@@ -411,20 +413,25 @@ def get_info_icohps_to_neighbors(
         """
         if self.valences is None and onlycation_isites:
             raise ValueError("No valences are provided")
+
         if isites is None:
             if onlycation_isites:
-                assert self.valences is not None
+                if self.valences is None:
+                    raise ValueError(f"{self.valences=}")
+
                 isites = [idx for idx in range(len(self.structure)) if self.valences[idx] >= 0.0]
             else:
                 isites = list(range(len(self.structure)))
 
+        if self.Icohpcollection is None:
+            raise ValueError(f"{self.Icohpcollection=}")
+
         summed_icohps: float = 0.0
         list_icohps: list[float] = []
         number_bonds: int = 0
         labels: list[str] = []
         atoms: list[list[str]] = []
         final_isites: list[int] = []
-        assert self.Icohpcollection is not None
         for idx, _site in enumerate(self.structure):
             if idx in isites:
                 for key, icohpsum in zip(self.list_keys[idx], self.list_icohps[idx], strict=True):
@@ -548,7 +555,9 @@ def get_info_cohps_to_neighbors(
 
         # Check that the number of bonds in ICOHPLIST and COHPCAR are identical
         # TODO: Further checks could be implemented
-        assert self.Icohpcollection is not None
+        if self.Icohpcollection is None:
+            raise ValueError(f"{self.Icohpcollection=}")
+
         if 
len(self.Icohpcollection._list_atom1) != len(self.completecohp.bonds): raise ValueError("COHPCAR and ICOHPLIST do not fit together") @@ -572,7 +581,9 @@ def get_info_cohps_to_neighbors( # Iterate through labels and atoms and check which bonds can be included new_labels = [] new_atoms = [] - assert final_isites is not None + if final_isites is None: + raise ValueError(f"{final_isites=}") + for key, atompair, isite in zip(labels, atoms, final_isites, strict=True): present = False for atomtype in only_bonds_to: @@ -646,7 +657,9 @@ def get_info_icohps_between_neighbors( if isites is None: if onlycation_isites: - assert self.valences is not None + if self.valences is None: + raise ValueError(f"{self.valences=}") + isites = [idx for idx in range(len(self.structure)) if self.valences[idx] >= 0.0] else: isites = list(range(len(self.structure))) @@ -656,7 +669,9 @@ def get_info_icohps_between_neighbors( number_bonds: int = 0 labels: list[str] = [] atoms: list[list[str]] = [] - assert self.Icohpcollection is not None + if self.Icohpcollection is None: + raise ValueError(f"{self.Icohpcollection=}") + for isite in isites: for site_idx, n_site in enumerate(self.list_neighsite[isite]): for site2_idx, n_site2 in enumerate(self.list_neighsite[isite]): @@ -750,7 +765,9 @@ def _evaluate_ce( """ # Get extremum if lowerlimit is None and upperlimit is None: - assert self.Icohpcollection is not None + if self.Icohpcollection is None: + raise ValueError(f"{self.Icohpcollection=}") + limits = self._get_limit_from_extremum( self.Icohpcollection, percentage=perc_strength_icohp, @@ -758,7 +775,8 @@ def _evaluate_ce( additional_condition=additional_condition, ) - assert limits is not None + if limits is None: + raise ValueError(f"{limits=}") lowerlimit, upperlimit = limits elif upperlimit is None or lowerlimit is None: @@ -780,8 +798,10 @@ def _evaluate_ce( # Make sure everything is relative to the given Structure and # not just the atoms in the unit cell if self.add_additional_data_sg: - assert self.bonding_list_1.icohpcollection is not None - assert self.bonding_list_2.icohpcollection is not None + if self.bonding_list_1.icohpcollection is None: + raise ValueError(f"{self.bonding_list_1.icohpcollection=}") + if self.bonding_list_2.icohpcollection is None: + raise ValueError(f"{self.bonding_list_2.icohpcollection=}") self.sg_list = [ [ @@ -889,7 +909,9 @@ def _find_environments( list_coords: list[list[NDArray]] = [] # Run over structure - assert self.Icohpcollection is not None + if self.Icohpcollection is None: + raise ValueError(f"{self.Icohpcollection=}") + for idx, site in enumerate(self.structure): icohps = self._get_icohps( icohpcollection=self.Icohpcollection, @@ -1010,7 +1032,8 @@ def _find_relevant_atoms_additional_condition( # Check additional conditions val1 = val2 = None - assert self.valences is not None + if self.valences is None: + raise ValueError(f"{self.valences=}") if additional_condition in {1, 3, 5, 6}: val1 = self.valences[atomnr1] val2 = self.valences[atomnr2] @@ -1227,7 +1250,9 @@ def _get_limit_from_extremum( or [max(strongest_icohp*0.15, noise_cutoff), inf]. 
""" extremum_based = None - assert self.valences is not None + + if self.valences is None: + raise ValueError(f"{self.valences=}") if not adapt_extremum_to_add_cond or additional_condition == 0: extremum_based = icohpcollection.extremum_icohpvalue(summed_spin_channels=True) * percentage diff --git a/src/pymatgen/io/lobster/outputs.py b/src/pymatgen/io/lobster/outputs.py index f8e4032b6e8..3f777f4a1d8 100644 --- a/src/pymatgen/io/lobster/outputs.py +++ b/src/pymatgen/io/lobster/outputs.py @@ -515,7 +515,8 @@ def __init__( def icohplist(self) -> dict[Any, dict[str, Any]]: """The ICOHP list compatible with older version of this class.""" icohp_dict = {} - assert self._icohpcollection is not None + if self._icohpcollection is None: + raise ValueError(f"{self._icohpcollection=}") for key, value in self._icohpcollection._icohplist.items(): icohp_dict[key] = { diff --git a/src/pymatgen/io/packmol.py b/src/pymatgen/io/packmol.py index 515572d99d8..b8e467f441d 100644 --- a/src/pymatgen/io/packmol.py +++ b/src/pymatgen/io/packmol.py @@ -213,7 +213,8 @@ def get_input_set( raise TypeError("Molecule is not provided in supported format.") fname = f"packmol_{dct['name']}.xyz" - assert mol is not None + if mol is None: + raise ValueError("mol is None") mapping[fname] = mol.to(fmt="xyz") if " " in str(fname): file_contents.append(f"structure {fname!r}") diff --git a/src/pymatgen/io/vasp/outputs.py b/src/pymatgen/io/vasp/outputs.py index 53f2e255744..394a8a34124 100644 --- a/src/pymatgen/io/vasp/outputs.py +++ b/src/pymatgen/io/vasp/outputs.py @@ -1589,7 +1589,8 @@ def _parse_dos(elem: XML_Element) -> tuple[Dos, Dos, list[dict]]: pdoss.append(pdos) elem.clear() - assert energies is not None + if energies is None: + raise ValueError("energies is None") return Dos(efermi, energies, tdensities), Dos(efermi, energies, idensities), pdoss @staticmethod @@ -2518,7 +2519,8 @@ def read_cs_raw_symmetrized_tensors(self) -> None: tensor_matrix = [] for line in table_body_text.rstrip().split("\n"): ml = row_pat.search(line) - assert ml is not None + if ml is None: + raise RuntimeError(f"failure to find pattern, {ml=}") processed_line = [float(v) for v in ml.groups()] tensor_matrix.append(processed_line) unsym_tensors.append(tensor_matrix) @@ -4053,16 +4055,19 @@ def _read(self, filename: PathLike, parsed_kpoints: set[tuple[Kpoint]] | None = elif expr.match(line): tokens = line.split() index = int(tokens.pop(0)) - 1 - assert headers is not None + if headers is None: + raise ValueError("headers is None") num_data = np.array([float(t) for t in tokens[: len(headers)]]) - assert phase_factors is not None + if phase_factors is None: + raise ValueError("phase_factors is None") if proj_data_parsed_for_band == 0: data[spin][current_kpoint, current_band, index, :] = num_data elif self.is_soc and proj_data_parsed_for_band < 4: proj_direction = {1: "x", 2: "y", 3: "z"}[proj_data_parsed_for_band] - assert xyz_data is not None + if xyz_data is None: + raise ValueError(f"{xyz_data=}") xyz_data[proj_direction][current_kpoint, current_band, index, :] = num_data elif len(tokens) > len(headers): # note no xyz projected phase factors with SOC @@ -4083,7 +4088,8 @@ def _read(self, filename: PathLike, parsed_kpoints: set[tuple[Kpoint]] | None = elif preamble_expr.match(line): match = preamble_expr.match(line) - assert match is not None + if match is None: + raise RuntimeError(f"Failed to find preamable pattern, {match=}") n_kpoints = int(match[1]) n_bands = int(match[2]) if eigenvalues is None: # first spin @@ -4127,10 +4133,14 @@ def 
get_projection_on_elements(self, structure: Structure) -> dict[Spin, list[li Returns: A dict as {Spin: [band index][kpoint index][{Element: values}]]. """ - assert self.data is not None, "Data cannot be None." - assert self.nkpoints is not None - assert self.nbands is not None - assert self.nions is not None + if self.data is None: + raise ValueError("data cannot be None.") + if self.nkpoints is None: + raise ValueError("nkpoints cannot be None.") + if self.nbands is None: + raise ValueError("nbands cannot be None.") + if self.nions is None: + raise ValueError("nions cannot be None.") elem_proj: dict[Spin, list] = {} for spin in self.data: @@ -4161,10 +4171,12 @@ def get_occupation(self, atom_index: int, orbital: str) -> dict: Returns: Sum occupation of orbital of atom. """ - assert self.orbitals is not None + if self.orbitals is None: + raise ValueError("orbitals is None") orbital_index = self.orbitals.index(orbital) - assert self.data is not None + if self.data is None: + raise ValueError("data is None") return { spin: np.sum(data[:, :, atom_index, orbital_index] * self.weights[:, None]) # type: ignore[call-overload] for spin, data in self.data.items() @@ -4396,7 +4408,8 @@ def __init__( else: coords_str.append(line) - assert preamble is not None + if preamble is None: + raise ValueError("preamble is None") poscar = Poscar.from_str("\n".join([*preamble, "Direct", *coords_str])) if ( (ionicstep_end is None and ionicstep_cnt >= ionicstep_start) @@ -4480,7 +4493,8 @@ def concatenate( else: coords_str.append(line) - assert preamble is not None + if preamble is None: + raise ValueError("preamble is None") poscar = Poscar.from_str("\n".join([*preamble, "Direct", *coords_str])) if ( @@ -5575,7 +5589,8 @@ def from_file(cls, filename: str) -> Self: terminate_on_match=False, postprocess=float, )["data"] - assert len(data_res) == nspin * nkpoints * nbands * nbands + if len(data_res) != nspin * nkpoints * nbands * nbands: + raise ValueError("incorrect length of data_res") data = np.array([complex(real_part, img_part) for (real_part, img_part), _ in data_res]) diff --git a/src/pymatgen/io/vasp/sets.py b/src/pymatgen/io/vasp/sets.py index d2e6aa7a18e..1b80f08e607 100644 --- a/src/pymatgen/io/vasp/sets.py +++ b/src/pymatgen/io/vasp/sets.py @@ -342,15 +342,19 @@ def write_input( zip_output (bool): If True, output will be zipped into a file with the same name as the InputSet (e.g., MPStaticSet.zip). """ + vasp_input = None try: vasp_input = self.get_input_set(potcar_spec=potcar_spec) - except PMG_VASP_PSP_DIR_Error: - assert potcar_spec is False - raise ValueError( - "PMG_VASP_PSP_DIR is not set." - " Please set the PMG_VASP_PSP_DIR in .pmgrc.yaml" - " or use potcar_spec=True argument." - ) + except PMG_VASP_PSP_DIR_Error as exc: + if not potcar_spec: + raise ValueError( + "PMG_VASP_PSP_DIR is not set." + " Please set the PMG_VASP_PSP_DIR in .pmgrc.yaml" + " or use potcar_spec=True argument." 
+ ) from exc + + if vasp_input is None: + raise ValueError("vasp_input is None") cif_name = None if include_cif: @@ -1900,7 +1904,8 @@ def structure(self, structure: Structure | None) -> None: # Project MAGMOM to z-axis structure = structure.copy(site_properties={"magmom": [[0, 0, site.magmom] for site in structure]}) - assert VaspInputSet.structure is not None + if VaspInputSet.structure is None: + raise ValueError("structure is None") VaspInputSet.structure.fset(self, structure) @@ -2180,7 +2185,8 @@ def kpoints_updates(self) -> Kpoints: # attributes aren't going to affect the VASP inputs anyways so # converting the slab into a structure should not matter # use k_product to calculate kpoints, k_product = kpts[0][0] * a - assert self.structure is not None + if self.structure is None: + raise ValueError("structure is None") lattice_abc = self.structure.lattice.abc kpt_calc = [ int(self.k_product / lattice_abc[0] + 0.5), @@ -2384,7 +2390,8 @@ def write_input( if make_dir_if_not_present and not output_dir.exists(): output_dir.mkdir(parents=True) self.incar.write_file(str(output_dir / "INCAR")) - assert self.kpoints is not None + if self.kpoints is None: + raise ValueError("kpoints is None") self.kpoints.write_file(str(output_dir / "KPOINTS")) self.potcar.write_file(str(output_dir / "POTCAR")) @@ -2400,7 +2407,8 @@ def write_input( for image in ("00", str(len(self.structures) - 1).zfill(2)): end_point_param.incar.write_file(str(output_dir / image / "INCAR")) - assert end_point_param.kpoints is not None + if end_point_param.kpoints is None: + raise ValueError("kpoints of end_point_param is None") end_point_param.kpoints.write_file(str(output_dir / image / "KPOINTS")) end_point_param.potcar.write_file(str(output_dir / image / "POTCAR")) if write_path_cif: @@ -2580,7 +2588,8 @@ class MVLNPTMDSet(VaspInputSet): def incar_updates(self) -> dict[str, Any]: """Updates to the INCAR config for this calculation type.""" # NPT-AIMD default settings - assert self.structure is not None + if self.structure is None: + raise ValueError("structure is None") updates = { "ALGO": "Fast", "ISIF": 3, diff --git a/src/pymatgen/phonon/bandstructure.py b/src/pymatgen/phonon/bandstructure.py index 1d836714f2a..70ef51bd2b6 100644 --- a/src/pymatgen/phonon/bandstructure.py +++ b/src/pymatgen/phonon/bandstructure.py @@ -516,7 +516,8 @@ def as_phononwebsite(self) -> dict: """Return a dictionary with the phononwebsite format: https://henriquemiranda.github.io/phononwebsite. 
""" - assert self.structure is not None, "Structure is required for as_phononwebsite" + if self.structure is None: + raise RuntimeError("Structure is required for as_phononwebsite") dct = {} # define the lattice @@ -601,11 +602,12 @@ def band_reorder(self) -> None: order = np.zeros([n_qpoints, n_phonons], dtype=np.int64) order[0] = np.array(range(n_phonons)) - # get the atomic masses - assert self.structure is not None, "Structure is required for band_reorder" + # Get the atomic masses + if self.structure is None: + raise RuntimeError("Structure is required for band_reorder") atomic_masses = [site.specie.atomic_mass for site in self.structure] - # get order + # Get order for nq in range(1, n_qpoints): old_eig_vecs = eigenvectors_from_displacements(eigen_displacements[:, nq - 1], atomic_masses) new_eig_vecs = eigenvectors_from_displacements(eigen_displacements[:, nq], atomic_masses) diff --git a/src/pymatgen/phonon/gruneisen.py b/src/pymatgen/phonon/gruneisen.py index a4aee2cc443..b1a891f1399 100644 --- a/src/pymatgen/phonon/gruneisen.py +++ b/src/pymatgen/phonon/gruneisen.py @@ -119,7 +119,8 @@ def average_gruneisen( raise ValueError(f"{limit_frequencies} is not an accepted value for limit_frequencies.") weights = self.multiplicities - assert weights is not None, "Multiplicities are not defined." + if weights is None: + raise ValueError("Multiplicities are not defined.") g = np.dot(weights[ind[0]], np.multiply(cv, gamma)[ind]).sum() / np.dot(weights[ind[0]], cv[ind]).sum() if squared: @@ -153,7 +154,8 @@ def thermal_conductivity_slack( Returns: The value of the thermal conductivity in W/(m*K) """ - assert self.structure is not None, "Structure is not defined." + if self.structure is None: + raise ValueError("Structure is not defined.") average_mass = np.mean([s.specie.atomic_mass for s in self.structure]) * amu_to_kg if theta_d is None: theta_d = self.acoustic_debye_temp @@ -214,7 +216,8 @@ def debye_temp_phonopy(self, freq_max_fit=None) -> float: Returns: Debye temperature in K. """ - assert self.structure is not None, "Structure is not defined." + if self.structure is None: + raise ValueError("Structure is not defined.") # Use of phonopy classes to compute Debye frequency t = self.tdos t.set_Debye_frequency(num_atoms=len(self.structure), freq_max_fit=freq_max_fit) @@ -227,7 +230,8 @@ def acoustic_debye_temp(self) -> float: """Acoustic Debye temperature in K, i.e. the Debye temperature divided by n_sites**(1/3). Adapted from abipy. """ - assert self.structure is not None, "Structure is not defined." + if self.structure is None: + raise ValueError("Structure is not defined.") return self.debye_temp_limit / len(self.structure) ** (1 / 3) diff --git a/src/pymatgen/phonon/plotter.py b/src/pymatgen/phonon/plotter.py index af6ffc6faad..02642c04544 100644 --- a/src/pymatgen/phonon/plotter.py +++ b/src/pymatgen/phonon/plotter.py @@ -458,28 +458,34 @@ def get_proj_plot( rgb_labels: a list of rgb colors for the labels; if not specified, the colors will be automatically generated. 
""" - assert self._bs.structure is not None, "Structure is required for get_proj_plot" + if self._bs.structure is None: + raise ValueError("Structure is required for get_proj_plot") elements = [elem.symbol for elem in self._bs.structure.elements] if site_comb == "element": - assert 2 <= len(elements) <= 4, "the compound must have 2, 3 or 4 unique elements" + if len(elements) not in {2, 3, 4}: + raise ValueError("the compound must have 2, 3 or 4 unique elements") indices: list[list[int]] = [[] for _ in range(len(elements))] for idx, elem in enumerate(self._bs.structure.species): for j, unique_species in enumerate(self._bs.structure.elements): if elem == unique_species: indices[j].append(idx) else: - assert isinstance(site_comb, list) - assert 2 <= len(site_comb) <= 4, "the length of site_comb must be 2, 3 or 4" + if not isinstance(site_comb, list): + raise TypeError("Site_comb should be a list.") + if len(site_comb) not in {2, 3, 4}: + raise ValueError("the length of site_comb must be 2, 3 or 4") all_sites = self._bs.structure.sites all_indices = {*range(len(all_sites))} for comb in site_comb: for idx in comb: - assert 0 <= idx < len(all_sites), "one or more indices in site_comb does not exist" + if not 0 <= idx < len(all_sites): + raise RuntimeError("one or more indices in site_comb does not exist") all_indices.remove(idx) if len(all_indices) != 0: raise ValueError(f"not all {len(all_sites)} indices are included in site_comb") indices = site_comb # type: ignore[assignment] - assert rgb_labels is None or len(rgb_labels) == len(indices), "wrong number of rgb_labels" + if rgb_labels is not None and len(rgb_labels) != len(indices): + raise ValueError("wrong number of rgb_labels") u = freq_units(units) _fig, ax = plt.subplots(figsize=(12, 8), dpi=300) @@ -663,8 +669,8 @@ def plot_compare( _colors = ("blue", "red", "green", "orange", "purple", "brown", "pink", "gray", "olive") if isinstance(other_plotter, PhononBSPlotter): other_plotter = {other_plotter._label or "other": other_plotter} - if colors: - assert len(colors) == len(other_plotter) + 1, "Wrong number of colors" + if colors and len(colors) != len(other_plotter) + 1: + raise ValueError("Wrong number of colors") self_data = self.bs_plot_data() diff --git a/src/pymatgen/symmetry/groups.py b/src/pymatgen/symmetry/groups.py index 21c535c5099..a5b84bc1225 100644 --- a/src/pymatgen/symmetry/groups.py +++ b/src/pymatgen/symmetry/groups.py @@ -357,7 +357,8 @@ def _generate_full_symmetry_ops(self) -> np.ndarray: gen_ops.append(op) symm_ops = np.append(symm_ops, [op], axis=0) new_ops = gen_ops # type: ignore[assignment] - assert len(symm_ops) == self.order + if len(symm_ops) != self.order: + raise ValueError("Symmetry operations and its order mismatch.") return symm_ops @classmethod diff --git a/src/pymatgen/transformations/advanced_transformations.py b/src/pymatgen/transformations/advanced_transformations.py index f3ff488a358..28a9750dc73 100644 --- a/src/pymatgen/transformations/advanced_transformations.py +++ b/src/pymatgen/transformations/advanced_transformations.py @@ -1079,8 +1079,8 @@ def apply_transformation(self, structure: Structure, return_ranked_list: bool | else: sp_to_remove = min(supercell.composition, key=lambda el: el.X) # Confirm species are of opposite oxidation states. 
-        assert sp_to_remove.oxi_state * sp.oxi_state < 0  # type: ignore[operator]
-
+        if sp_to_remove.oxi_state * sp.oxi_state >= 0:  # type: ignore[operator]
+            raise ValueError("Species should be of opposite oxidation states.")
         ox_diff = int(abs(round(sp.oxi_state - ox)))
         anion_ox = int(abs(sp_to_remove.oxi_state))  # type: ignore[arg-type]
         nx = supercell.composition[sp_to_remove]
diff --git a/src/pymatgen/util/coord.py b/src/pymatgen/util/coord.py
index 1ddc41e4ea9..e31c1466eb8 100644
--- a/src/pymatgen/util/coord.py
+++ b/src/pymatgen/util/coord.py
@@ -301,7 +301,8 @@ def lattice_points_in_supercell(supercell_matrix):
     frac_points = np.dot(all_points, np.linalg.inv(supercell_matrix))
 
     t_vecs = frac_points[np.all(frac_points < 1 - 1e-10, axis=1) & np.all(frac_points >= -1e-10, axis=1)]
-    assert len(t_vecs) == round(abs(np.linalg.det(supercell_matrix)))
+    if len(t_vecs) != round(abs(np.linalg.det(supercell_matrix))):
+        raise ValueError("Mismatch in the number of transformed vectors.")
 
     return t_vecs
 
@@ -449,7 +450,8 @@ def line_intersection(self, point1: Sequence[float], point2: Sequence[float], to
                     break
             if not found:
                 barys.append(p)
-        assert len(barys) < 3, "More than 2 intersections found"
+        if len(barys) >= 3:
+            raise ValueError("More than 2 intersections found")
         return [self.point_from_bary_coords(b) for b in barys]
 
     def __eq__(self, other: object) -> bool:
diff --git a/src/pymatgen/util/testing/__init__.py b/src/pymatgen/util/testing/__init__.py
index 1842f2a0c02..acf83e32c93 100644
--- a/src/pymatgen/util/testing/__init__.py
+++ b/src/pymatgen/util/testing/__init__.py
@@ -118,9 +118,10 @@ def serialize_with_pickle(self, objects: Any, protocols: Sequence[int] | None =
         # Test for equality
         if test_eq:
             for orig, unpickled in zip(objects, unpickled_objs, strict=True):
-                assert (
-                    orig == unpickled
-                ), f"Unpickled and original objects are unequal for {protocol=}\n{orig=}\n{unpickled=}"
+                if orig != unpickled:
+                    raise ValueError(
+                        f"Unpickled and original objects are unequal for {protocol=}\n{orig=}\n{unpickled=}"
+                    )
 
         # Save the deserialized objects and test for equality.
         objects_by_protocol.append(unpickled_objs)
@@ -139,10 +140,12 @@ def assert_msonable(self, obj: MSONable, test_is_subclass: bool = True) -> str:
         By default, the method tests whether obj is an instance of MSONable.
         This check can be deactivated by setting test_is_subclass=False.
""" - if test_is_subclass: - assert isinstance(obj, MSONable) - assert obj.as_dict() == type(obj).from_dict(obj.as_dict()).as_dict() + if test_is_subclass and not isinstance(obj, MSONable): + raise TypeError("obj is not MSONable") + if obj.as_dict() != type(obj).from_dict(obj.as_dict()).as_dict(): + raise ValueError("obj could not be reconstructed accurately from its dict representation.") json_str = json.dumps(obj.as_dict(), cls=MontyEncoder) round_trip = json.loads(json_str, cls=MontyDecoder) - assert issubclass(type(round_trip), type(obj)), f"{type(round_trip)} != {type(obj)}" + if not issubclass(type(round_trip), type(obj)): + raise TypeError(f"{type(round_trip)} != {type(obj)}") return json_str diff --git a/tests/analysis/diffraction/test_xrd.py b/tests/analysis/diffraction/test_xrd.py index 3d4386c4af8..997c43de42b 100644 --- a/tests/analysis/diffraction/test_xrd.py +++ b/tests/analysis/diffraction/test_xrd.py @@ -23,7 +23,7 @@ def test_type_wavelength(self): with pytest.raises(TypeError) as exc: XRDCalculator(wavelength) - assert "type(wavelength)= must be either float, int or str" in str(exc.value) + assert "wavelength_type='list' must be either float, int or str" in str(exc.value) def test_get_pattern(self): struct = self.get_structure("CsCl") diff --git a/tests/core/test_lattice.py b/tests/core/test_lattice.py index 84aa51f5953..7dd83a1227a 100644 --- a/tests/core/test_lattice.py +++ b/tests/core/test_lattice.py @@ -387,7 +387,7 @@ def test_get_points_in_sphere(self): assert len(result) == 4 assert all(len(arr) == 0 for arr in result) types = {*map(type, result)} - assert types == {np.ndarray}, f"Expected only np.ndarray, got {types}" + assert types == {np.ndarray}, f"Expected only np.ndarray, got {[t.__name__ for t in types]}" def test_get_all_distances(self): frac_coords = np.array( diff --git a/tests/io/vasp/test_inputs.py b/tests/io/vasp/test_inputs.py index c7be8c61ad5..59a3f038fd6 100644 --- a/tests/io/vasp/test_inputs.py +++ b/tests/io/vasp/test_inputs.py @@ -547,7 +547,7 @@ def test_init(self): def test_copy(self): incar2 = self.incar.copy() - assert isinstance(incar2, Incar), f"Expected Incar, got {type(incar2)}" + assert isinstance(incar2, Incar), f"Expected Incar, got {type(incar2).__name__}" assert incar2 == self.incar # modify incar2 and check that incar1 is not modified incar2["LDAU"] = "F"