# change_activation_functions.py (forked from keras-team/keras-io)
# Where do I change the activation function?
from tensorflow.keras import layers


def ResidualBlock(width):
    def apply(x):
        input_width = x.shape[3]
        if input_width == width:
            residual = x
        else:
            residual = layers.Conv2D(width, kernel_size=1)(x)
        x = layers.BatchNormalization(center=False, scale=False)(x)
        x = layers.Conv2D(width, kernel_size=3, padding="same", activation="swish")(x)  # try to change this layer
        x = layers.Conv2D(width, kernel_size=3, padding="same")(x)
        x = layers.Add()([x, residual])
        return x

    return apply
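
# Usage sketch (not part of the original file; the input shape and the width
# of 32 are illustrative assumptions): apply the block functionally to a
# tensor whose channel count matches `width`, so the skip connection is the
# identity.
import tensorflow as tf

inputs = tf.keras.Input(shape=(64, 64, 3))
x = layers.Conv2D(32, kernel_size=1)(inputs)
x = ResidualBlock(32)(x)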
#############################################################
# Sigmoid
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="sigmoid")(x)
#############################################################
# Softmax
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="softmax")(x)
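# Note: the "softmax" activation normalizes over the last (channel) axis, so it
# is an unusual choice for a hidden convolutional layer.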
#############################################################
# Tanh
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="tanh")(x)
#############################################################
# ReLU
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="relu")(x)
#############################################################
# Leaky ReLU
x = layers.Conv2D(width, kernel_size=3, padding="same")(x)
# Apply LeakyReLU activation
x = layers.LeakyReLU()(x)
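# The negative slope is configurable (which keyword applies depends on the
# Keras version in use, an assumption not stated in the original file):
# x = layers.LeakyReLU(negative_slope=0.2)(x)  # Keras 3
# x = layers.LeakyReLU(alpha=0.2)(x)           # tf.keras 2.x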
#############################################################
# PReLU
from tensorflow.keras.layers import PReLU
x = layers.Conv2D(width, kernel_size=3, padding="same")(x)
x = PReLU()(x)
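# PReLU is also exposed on the layers namespace, so the separate import is
# optional:
# x = layers.PReLU()(x)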
#############################################################
# ELU
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="elu")(x)
#############################################################
# Softplus
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="softplus")(x)
#############################################################
# Swish
x = layers.Conv2D(width, kernel_size=3, padding="same", activation="swish")(x) # default
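# "swish" is also known as SiLU; in Keras 3 the string "silu" names the same
# function.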
#############################################################
# Mish
import tensorflow as tf
from tensorflow.keras import layers


# Define the Mish activation function
def mish(x):
    return x * tf.math.tanh(tf.math.softplus(x))


def ResidualBlock(width):
    def apply(x):
        input_width = x.shape[3]
        if input_width == width:
            residual = x
        else:
            residual = layers.Conv2D(width, kernel_size=1)(x)
        x = layers.BatchNormalization(center=False, scale=False)(x)
        # Change the Conv2D activation to mish
        x = layers.Conv2D(width, kernel_size=3, padding="same", activation=mish)(x)
        x = layers.Conv2D(width, kernel_size=3, padding="same")(x)
        x = layers.Add()([x, residual])
        return x

    return apply
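#############################################################
# Alternative (a sketch, not from the original file): keep the Conv2D linear
# and apply the same mish function with a separate Activation layer.
x = layers.Conv2D(width, kernel_size=3, padding="same")(x)
x = layers.Activation(mish)(x)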