ex2_1_ann_mnist_cl.py
# 1. Implement the AI model for the classification ANN
from keras import layers, models
# 2. Functional implementation using the distributed (functional API) modeling style
def ANN_models_func(Nin, Nh, Nout):
    x = layers.Input(shape=(Nin,))
    h = layers.Activation('relu')(layers.Dense(Nh)(x))
    y = layers.Activation('softmax')(layers.Dense(Nout)(h))
    model = models.Model(x, y)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])
    return model
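

# Illustrative sketch (not part of the original script): it instantiates the
# functional-API builder above with the sizes used later in main() and prints
# the layer summary. The helper name _demo_functional_model is an assumption;
# it is defined but never called, so the script's behavior is unchanged.
def _demo_functional_model():
    demo_model = ANN_models_func(Nin=784, Nh=100, Nout=10)
    demo_model.summary()  # Input -> Dense(100)+relu -> Dense(10)+softmax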
# 3. Functional implementation using the chained (Sequential) modeling style
def ANN_seq_func(Nin, Nh, Nout):
    model = models.Sequential()
    model.add(layers.Dense(Nh, activation='relu', input_shape=(Nin,)))
    model.add(layers.Dense(Nout, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])
    return model
# 4. Object-oriented implementation using the distributed (functional API) modeling style
class ANN_models_class(models.Model):
    def __init__(self, Nin, Nh, Nout):
        # Prepare network layers and activation functions
        hidden = layers.Dense(Nh)
        output = layers.Dense(Nout)
        relu = layers.Activation('relu')
        softmax = layers.Activation('softmax')

        # Connect network elements
        x = layers.Input(shape=(Nin,))
        h = relu(hidden(x))
        y = softmax(output(h))

        super().__init__(x, y)
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam', metrics=['accuracy'])
# 5. Object-oriented implementation using the chained (Sequential) modeling style
class ANN_seq_class(models.Sequential):
    def __init__(self, Nin, Nh, Nout):
        super().__init__()
        self.add(layers.Dense(Nh, activation='relu', input_shape=(Nin,)))
        self.add(layers.Dense(Nout, activation='softmax'))
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam', metrics=['accuracy'])
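

# Illustrative sketch (not part of the original script): the four builders
# above are interchangeable -- each yields a compiled 784-100-10 classifier.
# The helper name _demo_equivalent_builders is an assumption and is never
# called, so the script's behavior is unchanged.
def _demo_equivalent_builders(Nin=784, Nh=100, Nout=10):
    builders = [ANN_models_func, ANN_seq_func, ANN_models_class, ANN_seq_class]
    for build in builders:
        model = build(Nin, Nh, Nout)
        # Each model has the same architecture, hence the same parameter count
        print(build.__name__, model.count_params())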
# 6. Load the data for the classification ANN
import numpy as np
from keras import datasets  # mnist
from keras.utils import np_utils  # to_categorical

def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.mnist.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H = X_train.shape
    X_train = X_train.reshape(-1, W * H)
    X_test = X_test.reshape(-1, W * H)

    X_train = X_train / 255.0
    X_test = X_test / 255.0

    return (X_train, Y_train), (X_test, Y_test)
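

# Illustrative sketch (not part of the original script): expected array shapes
# after Data_func, based on the standard MNIST split (60,000 train / 10,000
# test images of 28x28 pixels flattened to 784 features, labels one-hot
# encoded into 10 classes). The helper name _demo_data_shapes is an assumption
# and is never called.
def _demo_data_shapes():
    (X_train, Y_train), (X_test, Y_test) = Data_func()
    print(X_train.shape, Y_train.shape)  # (60000, 784) (60000, 10)
    print(X_test.shape, Y_test.shape)    # (10000, 784) (10000, 10)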
# 7. Plot the training results of the classification ANN
import matplotlib.pyplot as plt
from keraspp.skeras import plot_loss, plot_acc
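

# Illustrative fallback sketch (not part of the original script): assuming the
# keraspp helpers plot the loss and accuracy curves from the Keras History
# object, equivalent plots can be drawn directly as below if keraspp is not on
# the path. The key name 'acc' vs 'accuracy' depends on the Keras version, so
# both are tried. The helper names are assumptions and are never called.
def _plot_loss_fallback(history):
    plt.plot(history.history['loss'], label='train')
    plt.plot(history.history['val_loss'], label='validation')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()

def _plot_acc_fallback(history):
    acc_key = 'acc' if 'acc' in history.history else 'accuracy'
    plt.plot(history.history[acc_key], label='train')
    plt.plot(history.history['val_' + acc_key], label='validation')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend()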
# 8. Train the classification ANN and analyze its performance
def main():
    Nin = 784
    Nh = 100
    number_of_class = 10
    Nout = number_of_class

    # model = ANN_models_func(Nin, Nh, Nout)
    # model = ANN_seq_func(Nin, Nh, Nout)
    # model = ANN_models_class(Nin, Nh, Nout)
    model = ANN_seq_class(Nin, Nh, Nout)
    (X_train, Y_train), (X_test, Y_test) = Data_func()

    history = model.fit(X_train, Y_train, epochs=5,
                        batch_size=100, validation_split=0.2)
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_loss(history)
    plt.show()
    plot_acc(history)
    plt.show()
# Run code
if __name__ == '__main__':
    main()
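

# Illustrative sketch (not part of the original script): after training, the
# model can classify individual test images with predict(); argmax over the
# softmax output recovers the predicted digit. The helper name _demo_predict
# is an assumption and is never called by main().
def _demo_predict(model, X_test, n=5):
    probs = model.predict(X_test[:n])   # shape (n, 10) softmax scores
    labels = np.argmax(probs, axis=1)   # predicted digit per sample
    print('Predicted digits:', labels)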