EMG_Net
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from matplotlib import pyplot as plt
data = pd.read_csv("train.csv")  # placeholder filename; the original call left the CSV path unspecified
data = np.array(data)
m, n = data.shape
np.random.shuffle(data)
data_dev = data[0:1000].T
Y_dev = data_dev[0]
X_dev = data_dev[1:n]
data_train = data[1000:m].T
Y_train = data_train[0]
X_train = data_train[1:n]
def init_params():
    # Random weights and biases centered around zero (uniform in [-0.5, 0.5))
    W1 = np.random.rand(10, 784) - 0.5
    b1 = np.random.rand(10, 1) - 0.5
    W2 = np.random.rand(10, 10) - 0.5
    b2 = np.random.rand(10, 1) - 0.5
    return W1, b1, W2, b2
def ReLU(Z):
    return np.maximum(0, Z)
def softmax(Z):
    # Subtract the per-column max for numerical stability, then normalize each column
    expZ = np.exp(Z - np.max(Z, axis=0, keepdims=True))
    return expZ / np.sum(expZ, axis=0, keepdims=True)
def forward_prop(W1, b1, W2, b2, X):
    Z1 = W1.dot(X) + b1
    A1 = ReLU(Z1)
    Z2 = W2.dot(A1) + b2
    A2 = softmax(Z2)  # softmax is applied to the output layer pre-activations
    return Z1, A1, Z2, A2
def one_hot(Y):
    # One row per sample, one column per class; transposed to (classes, samples)
    one_hot_Y = np.zeros((Y.size, Y.max() + 1))
    one_hot_Y[np.arange(Y.size), Y] = 1
    return one_hot_Y.T
def deriv_ReLU(Z):
    return Z > 0
def back_prop(Z1, A1, Z2, A2, W2, X, Y):
    m = Y.size
    one_hot_Y = one_hot(Y)
    dZ2 = A2 - one_hot_Y
    dW2 = 1 / m * dZ2.dot(A1.T)
    db2 = 1 / m * np.sum(dZ2, axis=1, keepdims=True)
    dZ1 = W2.T.dot(dZ2) * deriv_ReLU(Z1)
    dW1 = 1 / m * dZ1.dot(X.T)
    db1 = 1 / m * np.sum(dZ1, axis=1, keepdims=True)
    return dW1, db1, dW2, db2
def update_params(W1, b1, W2, b2, dW1, db1, dW2, db2, alpha):
    W1 = W1 - alpha * dW1
    b1 = b1 - alpha * db1
    W2 = W2 - alpha * dW2
    b2 = b2 - alpha * db2
    return W1, b1, W2, b2
def get_predictions(A2):
    return np.argmax(A2, 0)
def get_accuracy(predictions, Y):
    print(predictions, Y)
    return np.sum(predictions == Y) / Y.size
def gradient_descent(X, Y, iterations, alpha):
    W1, b1, W2, b2 = init_params()
    for i in range(iterations):
        Z1, A1, Z2, A2 = forward_prop(W1, b1, W2, b2, X)
        dW1, db1, dW2, db2 = back_prop(Z1, A1, Z2, A2, W2, X, Y)
        W1, b1, W2, b2 = update_params(W1, b1, W2, b2, dW1, db1, dW2, db2, alpha)
        if i % 10 == 0:
            print("Iteration: ", i)
            print("Accuracy: ", get_accuracy(get_predictions(A2), Y))
    return W1, b1, W2, b2
W1, b1, W2, b2 = gradient_descent(X_train, Y_train, 500, 0.1)
def make_predictions(X, W1, b1, W2, b2):
    # Run a forward pass and return the class with the highest softmax probability
    _, _, _, A2 = forward_prop(W1, b1, W2, b2, X)
    return get_predictions(A2)
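# A minimal usage sketch: score the held-out dev split with the trained parameters,
# assuming the X_dev / Y_dev split created above.
dev_predictions = make_predictions(X_dev, W1, b1, W2, b2)
print("Dev accuracy: ", get_accuracy(dev_predictions, Y_dev))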