Merge pull request #30 from Vivswan/next
v1.0.5
Vivswan authored May 15, 2023
2 parents f5d049e + 14f5240 commit 7d94fa2
Showing 4 changed files with 17 additions and 31 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,10 @@
 # Changelog
 
+## 1.0.5 (Patches for PyTorch 2.0.1)
+
+* Removed the unnecessary `PseudoParameter.grad` property.
+* Patch for PyTorch 2.0.1: added input filtering in `BackwardGraph._calculate_gradients`.
+
 ## 1.0.4
 
 * Combined `PseudoParameter` and `PseudoParameterModule` for better visibility
17 changes: 9 additions & 8 deletions analogvnn/graph/BackwardGraph.py
@@ -267,7 +267,6 @@ def _calculate_gradients(  # noqa: C901
             grad_inputs = module._call_impl_backward(*grad_outputs.inputs.args, **grad_outputs.inputs.kwargs)
             return ArgsKwargs.to_args_kwargs_object(grad_inputs)
 
-        grad_dict = {}
         inputs = module_inputs_outputs.inputs.args + list(module_inputs_outputs.inputs.kwargs.values())
         outputs = []
         outputs_grads = []
@@ -291,26 +290,28 @@ def _calculate_gradients(  # noqa: C901
         module_parameters = list(module.parameters())
         inputs += module_parameters
 
+        grad_dict = {id(i): None for i in inputs}
+        filtered_inputs = [i for i in inputs if i is not None and i.requires_grad]
         out_grads = torch.autograd.grad(
             outputs=outputs,
-            inputs=inputs,
+            inputs=filtered_inputs,
             grad_outputs=outputs_grads,
             retain_graph=True,
             allow_unused=True
         )
         for i, v in enumerate(out_grads):
-            grad_dict[inputs[i]] = v
+            grad_dict[id(filtered_inputs[i])] = v
 
         for i in module_parameters:
-            if grad_dict[i] is None:
+            if grad_dict[id(i)] is None:
                 continue
 
             if i.grad is None:
-                i.grad = grad_dict[i]
+                i.grad = grad_dict[id(i)]
             else:
-                i.grad += grad_dict[i]
+                i.grad += grad_dict[id(i)]
 
         return ArgsKwargs(
-            args=[grad_dict[i] for i in module_inputs_outputs.inputs.args],
-            kwargs={key: grad_dict[value] for key, value in module_inputs_outputs.inputs.kwargs.items()}
+            args=[grad_dict[id(i)] for i in module_inputs_outputs.inputs.args],
+            kwargs={key: grad_dict[id(value)] for key, value in module_inputs_outputs.inputs.kwargs.items()}
         )
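For readers of the hunk above: `torch.autograd.grad` cannot differentiate with respect to inputs that are `None` or have `requires_grad=False`, so the patched code filters such entries out before the call and then maps the returned gradients back into a dictionary keyed by `id()`, leaving `None` for the filtered entries. Below is a minimal standalone sketch of that filter-and-remap pattern; the tensors and names are invented for illustration and this is not the library's code:

import torch

x = torch.randn(3, requires_grad=True)   # differentiable input
w = torch.randn(3, requires_grad=True)   # parameter-like input
c = torch.randn(3)                        # constant, requires_grad=False
inputs = [x, w, c, None]                  # mixed bag, as in the backward graph
loss = (x * w + c).sum()

# Key the gradient table by id() so filtered-out entries simply stay None.
grad_dict = {id(i): None for i in inputs}
filtered_inputs = [i for i in inputs if i is not None and i.requires_grad]

grads = torch.autograd.grad(
    outputs=[loss],
    inputs=filtered_inputs,               # only tensors autograd can differentiate
    grad_outputs=[torch.ones_like(loss)],
    retain_graph=True,
    allow_unused=True,
)
for i, g in zip(filtered_inputs, grads):
    grad_dict[id(i)] = g

print(grad_dict[id(x)])  # gradient of loss with respect to x
print(grad_dict[id(c)])  # None: filtered out, no error raised

The structure mirrors `_calculate_gradients` above, where `inputs` additionally includes the module's parameters.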
24 changes: 2 additions & 22 deletions analogvnn/parameter/PseudoParameter.py
@@ -122,26 +122,6 @@ def __repr__(self):
                f', original={self.original}' \
                f')'
 
-    @property
-    def grad(self):
-        """Returns the gradient of the parameter.
-
-        Returns:
-            Tensor: the gradient.
-        """
-
-        return self._transformed.grad
-
-    @grad.setter
-    def grad(self, grad: Tensor):
-        """Sets the gradient of the parameter.
-
-        Args:
-            grad (Tensor): the gradient.
-        """
-
-        self._transformed.grad = grad
-
     @property
     def transformation(self):
         """Returns the transformation.
@@ -187,8 +167,8 @@ def substitute_member(
         """Substitutes a member of a tensor as property of another tensor.
 
         Args:
-            tensor_from (Any): the tensor to substitute from.
-            tensor_to (Any): the tensor to substitute to.
+            tensor_from (Any): the tensor property to substitute.
+            tensor_to (Any): the tensor property to substitute to.
             property_name (str): the name of the property.
             setter (bool): whether to substitute the setter.
         """
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -14,7 +14,7 @@ py-modules = ['analogvnn']
 [project]
 # $ pip install analogvnn
 name = "analogvnn"
-version = "1.0.4"
+version = "1.0.5"
 description = "A fully modular framework for modeling and optimizing analog/photonic neural networks" # Optional
 readme = "README.md"
 requires-python = ">=3.7"
