Merge pull request #22 from Vivswan/next
v1.0.2: Bugfix: removed `graph` from `Layer` class
Vivswan authored May 8, 2023
2 parents cbbd394 + eaa3ea8 commit 12d1f39
Showing 4 changed files with 11 additions and 13 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,7 +1,15 @@
 # Changelog
 
 ## 1.0.0
 
 * Public release
 
 ## 1.0.1 (Patches for Pytorch 2.0.0)
+
+* added `grad.setter` to `PseudoParameterModule` class
+
+## 1.0.2
+
+* Bugfix: removed `graph` from `Layer` class
+* `graph` was causing issues with nested `Model` objects
+* Now `_use_autograd_graph` is directly set while compiling the `Model` object
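
The 1.0.2 bullets above can be illustrated with a minimal, self-contained sketch. ToyLayer and ToyModel below are hypothetical stand-ins, not analogvnn classes: each layer owns a plain `_use_autograd_graph` boolean behind a property, and a parent model copies that boolean into its children when it compiles, instead of handing every child a reference to a shared ModelGraph the way the removed `graphs` attribute did.

class ToyLayer:
    """Hypothetical stand-in for analogvnn's Layer."""

    def __init__(self):
        self._use_autograd_graph = False  # per-layer flag, as in Layer.__init__

    @property
    def use_autograd_graph(self) -> bool:
        return self._use_autograd_graph  # no shared-graph indirection

    @use_autograd_graph.setter
    def use_autograd_graph(self, value: bool):
        self._use_autograd_graph = value


class ToyModel(ToyLayer):
    """Hypothetical stand-in for analogvnn's Model."""

    def __init__(self, *children: ToyLayer):
        super().__init__()
        self.sub_layers = list(children)

    def compile(self):
        # Mirrors the loop in Model.compile() below; the real code walks
        # self.modules() recursively, so explicit recursion stands in here.
        for child in self.sub_layers:
            child.use_autograd_graph = self.use_autograd_graph
            if isinstance(child, ToyModel):
                child.compile()


leaf = ToyLayer()
outer = ToyModel(ToyModel(leaf))  # a Model nested inside another Model
outer.use_autograd_graph = True
outer.compile()
assert leaf.use_autograd_graph  # the flag reached the innermost layer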
12 changes: 1 addition & 11 deletions analogvnn/nn/module/Layer.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import functools
-from typing import Union, Type, Callable, Sequence, Optional, TYPE_CHECKING, Set, Iterator, Tuple
+from typing import Union, Type, Callable, Sequence, Optional, Set, Iterator, Tuple
 
 from torch import nn, Tensor
 
@@ -10,9 +10,6 @@
 from analogvnn.graph.ArgsKwargs import ArgsKwargs, ArgsKwargsOutput
 from analogvnn.utils.common_types import TENSORS
 
-if TYPE_CHECKING:
-    from analogvnn.graph.ModelGraph import ModelGraph
-
 __all__ = ['Layer']
 

@@ -63,7 +60,6 @@ class Layer(nn.Module):
         _outputs (Union[None, Tensor, Sequence[Tensor]]): Outputs of the layer.
         _backward_module (Optional[BackwardModule]): Backward module of the layer.
         _use_autograd_graph (bool): If True, the autograd graph is used to calculate the gradients.
-        graphs (Optional[ModelGraph]): Contains Forward and Backward Graphs of the layer.
         call_super_init (bool): If True, the super class __init__ of nn.Module is called
             https://github.com/pytorch/pytorch/pull/91819
     """
@@ -72,7 +68,6 @@ class Layer(nn.Module):
     _outputs: Union[None, Tensor, Sequence[Tensor]]
     _backward_module: Optional[BackwardModule]
     _use_autograd_graph: bool
-    graphs: Optional[ModelGraph]
 
     # https://github.com/pytorch/pytorch/pull/91819
     call_super_init: bool = True
@@ -84,7 +79,6 @@ def __init__(self):
         self._outputs = None
         self._backward_module = None
         self._use_autograd_graph = False
-        self.graphs = None
 
     def __call__(self, *inputs, **kwargs):
         """Calls the forward pass of neural network layer.
@@ -110,8 +104,6 @@ def use_autograd_graph(self) -> bool:
             bool: use_autograd_graph.
         """
 
-        if self.graphs is not None:
-            return self.graphs.use_autograd_graph
         return self._use_autograd_graph
 
     @use_autograd_graph.setter
@@ -123,8 +115,6 @@ def use_autograd_graph(self, use_autograd_graph: bool):
         """
 
         self._use_autograd_graph = use_autograd_graph
-        if self.graphs is not None:
-            self.graphs.use_autograd_graph = use_autograd_graph
 
     @property
     def inputs(self) -> ArgsKwargsOutput:
2 changes: 1 addition & 1 deletion analogvnn/nn/module/Model.py
@@ -158,7 +158,7 @@ def compile(self, device: Optional[torch.device] = None, layer_data: bool = True
         self.graphs.compile()
         for i in self.modules():
             if isinstance(i, Layer) and i != self:
-                i.graphs = self.graphs
+                i.use_autograd_graph = self.use_autograd_graph
 
         self.to(device=self.device)
 
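A note on the one-line change above: `nn.Module.modules()` yields submodules recursively, so this loop reaches the layers inside nested `Model` objects as well, and each of them now simply receives a copy of the parent's boolean. Under the removed line, those same layers were handed the outer model's `graphs` reference, so a layer belonging to an inner `Model` could end up reading `use_autograd_graph` from the wrong graph, which is the nested-`Model` issue described in the changelog.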
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -14,7 +14,7 @@ py-modules = ['analogvnn']
 [project]
 # $ pip install analogvnn
 name = "analogvnn"
-version = "1.0.1"
+version = "1.0.2"
 description = "A fully modular framework for modeling and optimizing analog/photonic neural networks" # Optional
 readme = "README.md"
 requires-python = ">=3.7"
