fix: Pre-commit fixes, docs, unused, typing
eddiebergman committed Sep 17, 2024
1 parent 11341ee commit b560734
Showing 8 changed files with 29 additions and 84 deletions.
4 changes: 2 additions & 2 deletions neps/__init__.py
@@ -1,5 +1,6 @@
from neps.api import run
from neps.plot.plot import plot
from neps.plot.tensorboard_eval import tblogger
from neps.search_spaces import (
ArchitectureParameter,
CategoricalParameter,
@@ -12,7 +13,6 @@
IntegerParameter,
)
from neps.status.status import get_summary_dict, status
from neps.plot.tensorboard_eval import tblogger

Integer = IntegerParameter
Float = FloatParameter
@@ -39,5 +39,5 @@
"GraphGrammar",
"GraphGrammarCell",
"GraphGrammarRepetitive",
"tblogger"
"tblogger",
]
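
For context, a hedged sketch of how these re-exported aliases are typically consumed when defining a search space. The parameter names and bounds below are illustrative and not taken from this commit:

    import neps

    # Illustrative search space built from the aliased parameter classes
    # (Float = FloatParameter, Integer = IntegerParameter as re-exported above).
    pipeline_space = {
        "learning_rate": neps.Float(lower=1e-4, upper=1e-1, log=True),
        "num_layers": neps.Integer(lower=1, upper=8),
    }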
2 changes: 1 addition & 1 deletion neps/env.py
@@ -8,7 +8,7 @@
T = TypeVar("T")
V = TypeVar("V")

ENV_VARS_USED: dict[str, tuple[str, Any]] = {}
ENV_VARS_USED: dict[str, tuple[Any, Any]] = {}


def get_env(key: str, parse: Callable[[str], T], default: V) -> T | V:
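The widened value type above (tuple[str, Any] to tuple[Any, Any]) fits a helper that records whatever was looked up, including None when the variable is unset. A minimal sketch under that assumption; this is not the actual body of get_env:

    from __future__ import annotations

    import os
    from typing import Any, Callable, TypeVar

    T = TypeVar("T")
    V = TypeVar("V")

    ENV_VARS_USED: dict[str, tuple[Any, Any]] = {}

    def get_env(key: str, parse: Callable[[str], T], default: V) -> T | V:
        # Fall back to the default when the variable is unset.
        raw = os.environ.get(key)
        value: T | V = default if raw is None else parse(raw)
        # Record what was used; `raw` may be None, hence tuple[Any, Any].
        ENV_VARS_USED[key] = (raw, value)
        return value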
4 changes: 1 addition & 3 deletions neps/optimizers/multi_fidelity/ifbo.py
@@ -9,7 +9,7 @@

from neps.state.optimizer import BudgetInfo
from neps.utils.types import ConfigResult
from neps.utils.common import instance_from_map, EvaluationData
from neps.utils.common import instance_from_map
from neps.search_spaces.search_space import FloatParameter, IntegerParameter, SearchSpace
from neps.optimizers.base_optimizer import BaseOptimizer
from neps.optimizers.bayesian_optimization.acquisition_functions import AcquisitionMapping
@@ -170,8 +170,6 @@ def __init__(
)
self.count = 0

self.evaluation_data = EvaluationData()

def _adjust_fidelity_for_freeze_thaw_steps(
self, pipeline_space: SearchSpace, step_size: int
) -> SearchSpace:
15 changes: 13 additions & 2 deletions neps/plot/plot3D.py
@@ -1,3 +1,5 @@
"""Plot a 3D landscape of learning curves for a given run."""

from __future__ import annotations

from dataclasses import dataclass
@@ -20,6 +22,8 @@

@dataclass
class Plotter3D:
"""Plot a 3d landscape of learning curves for a given run."""

loss_key: str = "Loss"
fidelity_key: str = "epochs"
run_path: str | Path | None = None
@@ -30,7 +34,7 @@ class Plotter3D:
bck_color_2d: tuple[float, float, float] = (0.8, 0.82, 0.8)
view_angle: tuple[float, float] = (15, -70)

def __post_init__(self):
def __post_init__(self) -> None:
if self.run_path is not None:
assert (
Path(self.run_path).absolute().is_dir()
@@ -51,22 +55,27 @@ def __post_init__(self):

@staticmethod
def get_x(df: pd.DataFrame) -> np.ndarray:
"""Get the x-axis values for the plot."""
return df["epochID"].to_numpy()

@staticmethod
def get_y(df: pd.DataFrame) -> np.ndarray:
"""Get the y-axis values for the plot."""
y_ = df["configID"].to_numpy()
return np.ones_like(y_) * y_[0]

@staticmethod
def get_z(df: pd.DataFrame) -> np.ndarray:
"""Get the z-axis values for the plot."""
return df["result.loss"].to_numpy()

@staticmethod
def get_color(df: pd.DataFrame) -> np.ndarray:
"""Get the color values for the plot."""
return df.index.to_numpy()

def prep_df(self, df: pd.DataFrame | None = None) -> pd.DataFrame:
"""Prepare the dataframe for plotting."""
df = self.df if df is None else df

_fid_key = f"config.{self.fidelity_key}"
@@ -84,12 +93,13 @@ def prep_df(self, df: pd.DataFrame | None = None) -> pd.DataFrame:
time_cols = ["metadata.time_started", "metadata.time_end"]
return df.sort_values(by=time_cols).reset_index(drop=True)

def plot3D(
def plot3D( # noqa: N802, PLR0915
self,
data: pd.DataFrame | None = None,
save_path: str | Path | None = None,
filename: str = "freeze_thaw",
) -> None:
"""Plot the 3D landscape of learning curves."""
data = self.prep_df(data)

# Create the figure and the axes for the plot
@@ -228,6 +238,7 @@ def save(
save_path: str | Path | None = None,
filename: str = "freeze_thaw",
) -> None:
"""Save the plot to a file."""
path = save_path if save_path is not None else self.run_path
assert path is not None

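A usage sketch of the Plotter3D API shown in this file. The run directory and output paths are illustrative and assume a completed NePS run with recorded learning curves:

    from neps.plot.plot3D import Plotter3D

    # "results/my_run" is a placeholder for the root directory of a finished run.
    plotter = Plotter3D(run_path="results/my_run", fidelity_key="epochs")
    plotter.plot3D(save_path="plots", filename="freeze_thaw")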
1 change: 1 addition & 0 deletions neps/runtime.py
@@ -97,6 +97,7 @@ def get_in_progress_trial() -> Trial:


def register_notify_trial_end(key: str, callback: Callable[[Trial], None]) -> None:
"""Register a callback to be called when a trial ends."""
_TRIAL_END_CALLBACKS[key] = callback


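A short sketch of how the callback registration documented above could be used; the key and the callback body are hypothetical, and the Trial object's fields are not shown in this diff:

    from neps.runtime import register_notify_trial_end

    def _log_trial_end(trial) -> None:
        # Hypothetical callback: report that a trial has finished.
        print(f"Trial finished: {trial}")

    register_notify_trial_end("my-logger", _log_trial_end)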
16 changes: 9 additions & 7 deletions neps/search_spaces/search_space.py
@@ -336,9 +336,10 @@ def sample(
else:
sampled_hps[name] = hp.sample()
break
except Exception as e:
except Exception as e: # noqa: BLE001
logger.warning(
f"Attempt {attempt + 1}/{patience} failed for sampling {name}: {str(e)}"
f"Attempt {attempt + 1}/{patience} failed for"
f" sampling {name}: {e!s}"
)
else:
logger.error(
@@ -350,7 +351,7 @@
)

return SearchSpace(**sampled_hps)

def mutate(
self,
*,
@@ -621,8 +622,8 @@ def get_search_space_grid(
Include default hyperparameters in the grid.
If all HPs have a `default` then add a single configuration.
If only partial HPs have defaults then add all combinations of defaults, but only to
the end of the list of configs.
If only partial HPs have defaults then add all combinations of defaults, but
only to the end of the list of configs.
Args:
size_per_numerical_hp: The size of the grid for each numerical hyperparameter.
@@ -899,8 +900,9 @@ def update_hp_values(self, new_values: dict[str, Any]) -> None:
"""
_hp_dict = self.hp_values()
_intersect = set(_hp_dict.keys()) & set(new_values.keys())
assert len(_intersect) == len(new_values), \
"All hyperparameters must be present! "\
assert len(_intersect) == len(new_values), (
"All hyperparameters must be present! "
f"{set(_hp_dict.keys()) - set(new_values.keys())} are missing"
)
_hp_dict.update(new_values)
self.set_hyperparameters_from_dict(_hp_dict)
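
The sample() hunk above wraps each hyperparameter draw in a patience/retry loop. A simplified, self-contained illustration of that pattern follows; hp.sample() and the hyperparameter object are stand-ins, not the real SearchSpace internals:

    import logging

    logger = logging.getLogger(__name__)

    def sample_with_patience(name: str, hp, patience: int = 3):
        """Retry a flaky sampler up to `patience` times, logging each failure."""
        for attempt in range(patience):
            try:
                return hp.sample()
            except Exception as e:  # noqa: BLE001
                logger.warning(
                    f"Attempt {attempt + 1}/{patience} failed for sampling {name}: {e!s}"
                )
        raise ValueError(f"Could not sample {name} after {patience} attempts.")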
69 changes: 0 additions & 69 deletions neps/utils/common.py
@@ -365,72 +365,3 @@ def instance_from_map( # noqa: C901, PLR0912
raise TypeError(f"{e} when calling {instance} with {args_dict}") from e

return instance


def get_rnd_state() -> dict:
np_state = list(np.random.get_state())
np_state[1] = np_state[1].tolist()
state = {
"random_state": random.getstate(),
"np_seed_state": np_state,
"torch_seed_state": torch.random.get_rng_state().tolist(),
}
if torch.cuda.is_available():
state["torch_cuda_seed_state"] = [
dev.tolist() for dev in torch.cuda.get_rng_state_all()
]
return state


def set_rnd_state(state: dict):
# rnd_s1, rnd_s2, rnd_s3 = state["random_state"]
random.setstate(
tuple(
tuple(rnd_s) if isinstance(rnd_s, list) else rnd_s
for rnd_s in state["random_state"]
)
)
np.random.set_state(tuple(state["np_seed_state"]))
torch.random.set_rng_state(torch.ByteTensor(state["torch_seed_state"]))
if torch.cuda.is_available() and "torch_cuda_seed_state" in state:
torch.cuda.set_rng_state_all(
[torch.ByteTensor(dev) for dev in state["torch_cuda_seed_state"]]
)


class AttrDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__dict__ = self


class DataWriter:
"""A class to specify how to save/write a data to the folder by
implementing your own write_data function.
Use the set_attributes function to set all your necessary attributes and the data
and then write_data will be called with only the directory path as argument
during the write process.
"""

def __init__(self, name: str):
self.name = name

def set_attributes(self, attribute_dict: dict[str, Any]):
for attribute_name, attribute in attribute_dict.items():
setattr(self, attribute_name, attribute)

def write_data(self, to_directory: Path):
raise NotImplementedError


class EvaluationData:
"""A class to store some data for a single evaluation (configuration)
and write that data to its corresponding config folder.
"""

def __init__(self):
self.data_dict: dict[str, DataWriter] = {}

def write_all(self, directory: Path):
for _, data_writer in self.data_dict.items():
data_writer.write_data(directory)
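
For reference, a minimal sketch of how the now-removed DataWriter/EvaluationData pair was intended to be used, inferred from the docstrings above. The subclass, attribute names, and target directory are hypothetical, and both base classes are deleted by this commit:

    from pathlib import Path

    class LossCurveWriter(DataWriter):
        # Hypothetical writer: dump a stored loss curve into the config folder.
        def write_data(self, to_directory: Path) -> None:
            (to_directory / f"{self.name}.txt").write_text(str(self.losses))

    evaluation_data = EvaluationData()
    writer = LossCurveWriter("loss_curve")
    writer.set_attributes({"losses": [0.9, 0.5, 0.3]})
    evaluation_data.data_dict[writer.name] = writer
    evaluation_data.write_all(Path("results/config_1"))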
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -108,6 +108,7 @@ exclude = [
"neps/search_spaces/architecture/**/*.py",
"neps/search_spaces/yaml_search_space_utils.py",
"neps/utils/run_args_from_yaml.py",
"neps/utils/common.py",
"neps/api.py",
"tests",
"neps_examples",
@@ -211,6 +212,7 @@ ignore = [
"COM812", # Require trailing commas, recommended to ignore due to ruff formatter
"PLR2004", # No magic numbers inline
"N817", # CamelCase import as (ignore for ConfigSpace)
"N999", # Invalid name for module
"NPY002", # Replace legacy `np.random.choice` call with `np.random.Generator`
]

