activations.py
import numpy as np
################################################
# Activations
################################################
class Sigmoid():
    def __init__(self, c=1, b=0):
        # c scales the input and b shifts it: sigmoid(c * (x + b))
        self.c = c
        self.b = b

    def value(self, x):
        val = 1 + np.exp(-self.c*(x + self.b))
        return 1/val

    def diff(self, x, remove=False):
        # Derivative: c * sigmoid(x) * (1 - sigmoid(x)).
        # If remove is True, drop the last row (e.g. a bias row) before differentiating.
        y = self.value(x)
        if remove:
            y = y[:-1, :]
        val = self.c*y*(1 - y)
        return val
class Tanh():
    def __init__(self):
        pass

    def value(self, x):
        # tanh(x) = (e^x - e^-x) / (e^x + e^-x)
        num = np.exp(x) - np.exp(-x)
        denom = np.exp(x) + np.exp(-x)
        return num/denom

    def diff(self, x):
        # Derivative: 1 - tanh(x)^2
        y = self.value(x)
        val = 1 - y**2
        return val
class Relu():
    def __init__(self):
        pass

    def value(self, x):
        # max(0, x); copy first so the caller's array is not modified in place
        val = x.copy()
        val[val < 0] = 0
        return val

    def diff(self, x):
        # Derivative is 1 where x > 0 and 0 elsewhere; mask on x, not on the ones array
        val = np.ones(x.shape)
        val[x <= 0] = 0
        return val
class Softmax():
    def __init__(self):
        pass

    def value(self, x):
        # Column-wise softmax: exponentiate and normalise over axis 0
        val = np.exp(x)/np.sum(np.exp(x), axis=0)
        return val

    def diff(self, x):
        # Jacobian of softmax: diag(y) - y @ y.T
        # Reference for checking examples:
        # https://aimatters.wordpress.com/2019/06/17/the-softmax-function-derivative/
        y = self.value(x)
        mat = np.tile(y, y.shape[0])
        val = np.diag(y.reshape(-1,)) - (mat*mat.T)
        return val
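################################################
# Minimal usage sketch (not part of the original file).
# Assumption: the diff methods expect column vectors of shape (n, 1),
# as suggested by the axis-0 normalisation and row slicing above.
################################################
if __name__ == "__main__":
    x = np.array([[0.5], [-1.0], [2.0]])  # hypothetical pre-activations, shape (3, 1)

    relu = Relu()
    print(relu.value(x))   # elementwise max(0, x)
    print(relu.diff(x))    # 0/1 mask with the same shape as x

    softmax = Softmax()
    y = softmax.value(x)   # probabilities summing to 1 along axis 0
    J = softmax.diff(x)    # (3, 3) Jacobian
    # Sanity check against the closed form diag(y) - y y^T
    print(np.allclose(J, np.diag(y.ravel()) - y @ y.T))  # expect True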