diff --git a/CHANGELOG.md b/CHANGELOG.md
index fe0f0cb..1eefad4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,15 @@
 # Changelog
 
 ## 1.0.0
+
 * Public release
 
 ## 1.0.1 (Patches for Pytorch 2.0.0)
+
 * added `grad.setter` to `PseudoParameterModule` class
+
+## 1.0.2
+
+* Bugfix: removed `graph` from `Layer` class
+  * `graph` was causing issues with nested `Model` objects
+  * Now `_use_autograd_graph` is directly set while compiling the `Model` object
\ No newline at end of file
diff --git a/analogvnn/nn/module/Layer.py b/analogvnn/nn/module/Layer.py
index 97e20ce..f31a34b 100644
--- a/analogvnn/nn/module/Layer.py
+++ b/analogvnn/nn/module/Layer.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import functools
-from typing import Union, Type, Callable, Sequence, Optional, TYPE_CHECKING, Set, Iterator, Tuple
+from typing import Union, Type, Callable, Sequence, Optional, Set, Iterator, Tuple
 
 from torch import nn, Tensor
 
@@ -10,9 +10,6 @@
 from analogvnn.graph.ArgsKwargs import ArgsKwargs, ArgsKwargsOutput
 from analogvnn.utils.common_types import TENSORS
 
-if TYPE_CHECKING:
-    from analogvnn.graph.ModelGraph import ModelGraph
-
 __all__ = ['Layer']
 
 
@@ -63,7 +60,6 @@ class Layer(nn.Module):
         _outputs (Union[None, Tensor, Sequence[Tensor]]): Outputs of the layer.
         _backward_module (Optional[BackwardModule]): Backward module of the layer.
         _use_autograd_graph (bool): If True, the autograd graph is used to calculate the gradients.
-        graphs (Optional[ModelGraph]): Contains Forward and Backward Graphs of the layer.
         call_super_init (bool): If True, the super class __init__ of nn.Module is called
             https://github.com/pytorch/pytorch/pull/91819
     """
@@ -72,7 +68,6 @@ class Layer(nn.Module):
     _outputs: Union[None, Tensor, Sequence[Tensor]]
     _backward_module: Optional[BackwardModule]
     _use_autograd_graph: bool
-    graphs: Optional[ModelGraph]
 
     # https://github.com/pytorch/pytorch/pull/91819
     call_super_init: bool = True
@@ -84,7 +79,6 @@ def __init__(self):
         self._outputs = None
         self._backward_module = None
         self._use_autograd_graph = False
-        self.graphs = None
 
     def __call__(self, *inputs, **kwargs):
         """Calls the forward pass of neural network layer.
@@ -110,8 +104,6 @@ def use_autograd_graph(self) -> bool:
 
             bool: use_autograd_graph.
         """
-        if self.graphs is not None:
-            return self.graphs.use_autograd_graph
         return self._use_autograd_graph
 
     @use_autograd_graph.setter
@@ -123,8 +115,6 @@ def use_autograd_graph(self, use_autograd_graph: bool):
 
         """
         self._use_autograd_graph = use_autograd_graph
-        if self.graphs is not None:
-            self.graphs.use_autograd_graph = use_autograd_graph
 
     @property
     def inputs(self) -> ArgsKwargsOutput:
diff --git a/analogvnn/nn/module/Model.py b/analogvnn/nn/module/Model.py
index 20b919c..4d5a99e 100644
--- a/analogvnn/nn/module/Model.py
+++ b/analogvnn/nn/module/Model.py
@@ -158,7 +158,7 @@ def compile(self, device: Optional[torch.device] = None, layer_data: bool = True
         self.graphs.compile()
         for i in self.modules():
             if isinstance(i, Layer) and i != self:
-                i.graphs = self.graphs
+                i.use_autograd_graph = self.use_autograd_graph
 
         self.to(device=self.device)
 
diff --git a/pyproject.toml b/pyproject.toml
index 08f55aa..82c47d3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,7 +14,7 @@
 py-modules = ['analogvnn']
 [project] # $ pip install analogvnn
 name = "analogvnn"
-version = "1.0.1"
+version = "1.0.2"
 description = "A fully modular framework for modeling and optimizing analog/photonic neural networks" # Optional
 readme = "README.md"
 requires-python = ">=3.7"