# activation.py
import numpy as np
import sys


class Activation:
    """Base class for activation layers; caches values needed for backprop."""
    def __init__(self):
        self.X = None     # cached input
        self.Y = None     # cached output
        self.sign = None  # boolean mask of the non-positive region

    def forward(self, X):
        self.X = X


class ReLU(Activation):
    """Rectified Linear Unit"""
    def __init__(self):
        super().__init__()

    def forward(self, X, α=0):
        # Scale the non-positive region by α (α=0 gives the plain ReLU).
        self.sign = (X <= 0)
        X[self.sign] = X[self.sign] * α
        return X

    def backward(self, dY, α=0):
        # Gradient is 1 where X > 0 and α elsewhere.
        dY[self.sign] = dY[self.sign] * α
        return dY


class LReLU(ReLU):
    """Leaky Rectified Linear Unit"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        return super().forward(X, 0.01)

    def backward(self, dY):
        return super().backward(dY, 0.01)


class PReLU(ReLU):
    """Parametric Rectified Linear Unit"""
    def __init__(self):
        super().__init__()
        self.α = None

    def forward(self, X, α):
        # α is a learnable slope supplied by the caller.
        self.α = α
        return super().forward(X, α)

    def backward(self, dY):
        return super().backward(dY, self.α)


class ELU(Activation):
    """Exponential Linear Unit"""
    def __init__(self):
        super().__init__()
        self.α = None

    def forward(self, X, α, λ=1.0):
        # f(x) = λ*x for x > 0, λ*α*(exp(x) - 1) otherwise (λ=1 gives the plain ELU).
        self.α = α
        self.sign = (X <= 0)
        Y = λ * X
        Y[self.sign] = λ * α * (np.exp(X[self.sign]) - 1.0)
        self.Y = Y
        return Y

    def backward(self, dY, λ=1.0):
        # f'(x) = λ for x > 0, λ*α*exp(x) = Y + λ*α otherwise.
        dX = λ * dY
        dX[self.sign] = dY[self.sign] * (self.Y[self.sign] + λ * self.α)
        return dX


class SELU(ELU):
    """Scaled Exponential Linear Unit (Klambauer et al., 2017)"""
    def __init__(self):
        super().__init__()
        self.α = 1.67326
        self.λ = 1.0507

    def forward(self, X):
        return super().forward(X, self.α, self.λ)

    def backward(self, dY):
        return super().backward(dY, self.λ)


class Sigmoid(Activation):
    """Logistic Function"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        self.Y = 1 / (1 + np.exp(-X))
        return self.Y

    def backward(self, dY):
        # σ'(x) = σ(x) * (1 - σ(x))
        dX = dY * self.Y * (1.0 - self.Y)
        return dX


class SoftPlus(Sigmoid):
    """SoftPlus: f(x) = ln(1 + exp(x))"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        self.X = X
        return np.log(1.0 + np.exp(X))

    def backward(self, dY):
        # d/dx softplus(x) = sigmoid(x)
        dX = dY * super().forward(self.X)
        return dX


class Tanh(Activation):
    """Hyperbolic Tangent"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        # tanh(x) = 2 / (1 + exp(-2x)) - 1
        self.Y = 2.0 / (1.0 + np.exp(-2 * X)) - 1.0
        return self.Y

    def backward(self, dY):
        # tanh'(x) = 1 - tanh(x)^2
        dX = dY * (1.0 - (self.Y) ** 2)
        return dX


class ArcTan(Activation):
    """ArcTangent"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        self.X = X
        return np.arctan(X)

    def backward(self, dY):
        # d/dx arctan(x) = 1 / (1 + x^2)
        dX = dY / (1.0 + (self.X) ** 2)
        return dX


class SoftSign(Activation):
    """SoftSign: f(x) = x / (1 + |x|)"""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        self.sign = (X < 0)
        aX = X.copy()
        aX[self.sign] = -1.0 * aX[self.sign]  # aX = |X|
        self.X = aX
        return X / (1.0 + aX)

    def backward(self, dY):
        # d/dx x/(1 + |x|) = 1 / (1 + |x|)^2
        return dY / (1.0 + self.X) ** 2


def Softmax(X):
    # Subtract the maximum before exponentiating for numerical stability.
    option = X.ndim
    if option == 1:
        X = X - np.max(X)
        return np.exp(X) / np.sum(np.exp(X))
    elif option == 2:
        X = X.T
        X = X - np.max(X, axis=0)
        Y = np.exp(X) / np.sum(np.exp(X), axis=0)
        return Y.T
    else:
        sys.stderr.write('unexpected dimension data was given to Softmax function.\n')
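

# A minimal usage sketch (not part of the original module): it assumes the
# forward/backward conventions above, i.e. forward(X) returns the activation
# and backward(dY) returns dY scaled by the local derivative. The array shape,
# seed, and the finite-difference check below are illustrative choices, not
# fixtures from this repository.
if __name__ == '__main__':
    np.random.seed(0)
    X = np.random.randn(4, 3)

    act = Sigmoid()
    Y = act.forward(X.copy())
    dX = act.backward(np.ones_like(Y))

    # Compare the analytic gradient with a central finite difference.
    eps = 1e-6
    numeric = (Sigmoid().forward(X + eps) - Sigmoid().forward(X - eps)) / (2 * eps)
    print('max |analytic - numeric| =', np.max(np.abs(dX - numeric)))

    # Each row of the softmax output should sum to 1.
    P = Softmax(X)
    print('row sums:', np.sum(P, axis=1))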