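"""neuron_network.py

Builds and trains a fully connected Keras classifier for 10-class
28x28 image data (e.g. Fashion-MNIST, as in the commented-out test
section below), with helpers for training, evaluation, and plotting
learning curves.
"""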
import tensorflow as tf
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Flatten, Dense, Dropout
import matplotlib.pyplot as plt
########################################################################################################
### MODEL PREPARATION
# Fully connected classifier: flatten the 28x28 input, pass it through four
# sigmoid hidden layers with dropout, and finish with a 10-way softmax.
model = Sequential()
model.add(Flatten(input_shape=(28, 28)))
model.add(Dense(512, activation='sigmoid'))
model.add(Dropout(0.2))
model.add(Dense(256, activation='sigmoid'))
model.add(Dropout(0.2))
model.add(Dense(128, activation='sigmoid'))
model.add(Dropout(0.2))
model.add(Dense(64, activation='sigmoid'))
model.add(Dropout(0.2))
model.add(Dense(10, activation='softmax'))
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
################################# MODEL TRAIN ######################################################
def start(X_train, y_train, epochs, verbose, batch_size, validation_split):
    """Train the model and return the Keras History object."""
    history = model.fit(X_train,
                        y_train,
                        epochs=epochs,
                        verbose=verbose,
                        batch_size=batch_size,
                        validation_split=validation_split)
    return history


def evaluate(X_val, y_val, batch_size):
    """Evaluate the model on held-out data; returns [loss, accuracy]."""
    return model.evaluate(X_val, y_val, batch_size=batch_size)
################################# PLOTS ###################################################
def draw_curves(history, key1='accuracy', ylim1=(0.8, 1.00),
                key2='loss', ylim2=(0.0, 1.0)):
    """Plot training vs. validation curves for two History keys side by side."""
    plt.figure(figsize=(12, 4))

    plt.subplot(1, 2, 1)
    plt.plot(history.history[key1], "r--")
    plt.plot(history.history['val_' + key1], "g--")
    plt.ylabel(key1)
    plt.xlabel('Epoch')
    plt.ylim(ylim1)
    plt.legend(['train', 'validation'], loc='best')

    plt.subplot(1, 2, 2)
    plt.plot(history.history[key2], "r--")
    plt.plot(history.history['val_' + key2], "g--")
    plt.ylabel(key2)
    plt.xlabel('Epoch')
    plt.ylim(ylim2)
    plt.legend(['train', 'validation'], loc='best')

    plt.show()


def model_summary():
    """Print the layer-by-layer architecture of the model."""
    model.summary()

# model_summary()
################ TESTING #########################################
# labels = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat', 'Sandal', 'Shirt', 'Sneaker',
#           'Bag', 'Ankle boot']
# fashion_mnist = tf.keras.datasets.fashion_mnist
# (X_train, y_train), (X_val, y_val) = fashion_mnist.load_data()
#
# X_train = X_train.astype('float32') / 255.0
# X_val = X_val.astype('float32') / 255.0
# y_train = to_categorical(y_train, len(labels))
# y_val = to_categorical(y_val, len(labels))
#
# history = start(X_train, y_train, 50, 1, 32, 0.2)
# evaluate(X_val, y_val, batch_size=32)
# draw_curves(history=history)