-
Notifications
You must be signed in to change notification settings - Fork 1
/
linearLayer.py
49 lines (34 loc) · 1.1 KB
/
linearLayer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
#Author : Youness Landa
import numpy as np
from utils import softmax
class ClassificationLinear:
    """Fully-connected (linear) output layer with a per-timestep softmax.

    Maps a sequence of input vectors x (shape (T, input_size)) to a sequence
    of probability vectors y_pred (shape (T, output_size)) via
    y_pred[t] = softmax(x[t] @ W + b).
    """

    def __init__(self, input_size, output_size):
        """Initialize W (input_size, output_size) and b (output_size,).

        A fixed seed is used so weight initialization is reproducible
        across runs; weights are scaled by 0.1 to keep initial outputs small.
        """
        rnd = np.random.RandomState(seed=1)
        self.W = rnd.randn(input_size, output_size) * 0.1
        self.b = rnd.randn(output_size) * 0.1

    def get_params(self):
        """Return the layer parameters as a (W, b) tuple."""
        return (self.W, self.b)

    def forward(self, x):
        """Run the forward pass.

        Parameters
        ----------
        x : ndarray, shape (T, input_size)
            Sequence of T input vectors.

        Returns
        -------
        y_pred : ndarray, shape (T, output_size)
            Softmax probabilities for each timestep.
        fwd_x : ndarray
            The input x, cached for use in backward().
        """
        W, b = self.get_params()
        O = W.shape[1]
        T, _ = x.shape
        y_pred = np.zeros([T, O])
        for t in range(T):
            # Affine map then softmax, one timestep at a time
            # (assumes softmax operates on a single vector — TODO confirm in utils).
            y_pred[t] = softmax(np.dot(x[t], W) + b)
        # NOTE: the original code also kept a `loss = 0` local here that was
        # never updated or returned — removed as dead code.
        fwd_x = x  # cache input for the backward pass
        return y_pred, fwd_x

    def backward(self, deltas, fwd_x):
        """Run the backward pass.

        Parameters
        ----------
        deltas : ndarray, shape (T, output_size)
            Upstream gradients w.r.t. the layer's pre-softmax outputs.
        fwd_x : ndarray, shape (T, input_size)
            The cached input returned by forward().

        Returns
        -------
        dx : ndarray, shape (T, input_size)
            Gradient w.r.t. the input.
        gradients : tuple (dW, db)
            Gradients w.r.t. W and b, accumulated over all timesteps.
        """
        W, b = self.get_params()
        x = fwd_x
        I = W.shape[0]
        T, O = deltas.shape
        dx = np.zeros([T, I])
        gradients = [np.zeros_like(w) for w in self.get_params()]
        dW, db = gradients
        for t in reversed(range(T)):
            dx[t] = np.dot(deltas[t], W.T)
            # Accumulate parameter gradients across timesteps:
            # dW += x[t]^T · deltas[t], db += deltas[t].
            dW += np.outer(x[t], deltas[t])
            db += deltas[t]
        gradients = dW, db
        return dx, gradients