From 38a966c4d6266dc59b3f44e3d01d6fd8bf8a61d9 Mon Sep 17 00:00:00 2001
From: Vivswan Shah <58091053+Vivswan@users.noreply.github.com>
Date: Wed, 22 Nov 2023 01:40:20 -0500
Subject: [PATCH] GeLU backward eq fix

---
 CHANGELOG.md                        | 3 +++
 analogvnn/nn/activation/Gaussian.py | 2 +-
 pyproject.toml                      | 2 +-
 requirements.txt                    | 2 +-
 4 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3058eb4..724fcd0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # Changelog
 
+## 1.0.7
+* Fixed `GeLU` backward function equation.
+
 ## 1.0.6
 * `Model` is subclass of `BackwardModule` for additional functionality.
 
diff --git a/analogvnn/nn/activation/Gaussian.py b/analogvnn/nn/activation/Gaussian.py
index 2d0661e..d18ec81 100644
--- a/analogvnn/nn/activation/Gaussian.py
+++ b/analogvnn/nn/activation/Gaussian.py
@@ -68,6 +68,6 @@ def backward(self, grad_output: Optional[Tensor]) -> Optional[Tensor]:
         x = self.inputs
         grad = (1 / 2) * (
-            (1 + torch.erf(x / math.sqrt(2))) + x * ((2 / math.sqrt(math.pi)) * torch.exp(-torch.pow(x, 2)))
+            (1 + torch.erf(x / math.sqrt(2))) + x * (math.sqrt(2 / math.pi) * torch.exp(-torch.pow(x, 2) / 2))
         )
         return grad_output * grad
 
diff --git a/pyproject.toml b/pyproject.toml
index e135a42..e04526a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,7 +19,7 @@ where = ["analogvnn"]
 
 [project]  # $ pip install analogvnn
 name = "analogvnn"
-version = "1.0.6"
+version = "1.0.7"
 description = "A fully modular framework for modeling and optimizing analog/photonic neural networks"
 readme = "README.md"
 requires-python = ">=3.7"
diff --git a/requirements.txt b/requirements.txt
index 95f993f..82526c1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
---extra-index-url https://download.pytorch.org/whl/cu118
+--extra-index-url https://download.pytorch.org/whl/cu121
 torch
 torchvision
 torchaudio
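Verification note (not part of the patch): the corrected expression follows from GeLU(x) = x * Phi(x), whose derivative is Phi(x) + x * phi(x) = (1/2)(1 + erf(x/sqrt(2))) + x * exp(-x^2/2)/sqrt(2*pi); factoring out the 1/2 gives the sqrt(2/pi) * exp(-x^2/2) term in the "+" line above. The sketch below is a minimal, hypothetical check under that assumption: `gelu_forward`/`gelu_backward` are illustrative helpers mirroring the erf-based forward and the patched backward in Gaussian.py, not names from the repository.

```python
# Illustrative sanity check for the patched GeLU gradient (not part of the patch).
import math

import torch


def gelu_forward(x: torch.Tensor) -> torch.Tensor:
    # Exact GeLU: x * Phi(x), with Phi the standard normal CDF (erf form).
    return (1 / 2) * x * (1 + torch.erf(x / math.sqrt(2)))


def gelu_backward(x: torch.Tensor) -> torch.Tensor:
    # Patched closed form: Phi(x) + x * phi(x), with the 1/2 factored out.
    return (1 / 2) * (
        (1 + torch.erf(x / math.sqrt(2)))
        + x * (math.sqrt(2 / math.pi) * torch.exp(-torch.pow(x, 2) / 2))
    )


x = torch.linspace(-4.0, 4.0, 101, requires_grad=True)
autograd_grad, = torch.autograd.grad(gelu_forward(x).sum(), x)

# Should print True: the closed form matches autograd to float precision.
print(torch.allclose(autograd_grad, gelu_backward(x.detach()), atol=1e-6))
```

With the pre-patch term, (2/sqrt(pi)) * exp(-x^2), the same comparison fails, which is the discrepancy this commit addresses.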