z_helper.py
"""Helper utilities for a small NumPy/Numba neural network: CSV loading,
soft one-hot target encoding, a shuffled train/validation/test split, and
JIT-compiled activation functions."""
import numpy as np
from numba import njit


def import_from_csv(path, data_type):
    # Read a comma-separated file into a NumPy array of the given dtype.
    return np.genfromtxt(path, dtype=data_type, delimiter=',')


def class_to_array(maximum_class, x):
    # Encode the 1-based class label x as a soft one-hot target vector:
    # 0.99 at the true class, 0.01 everywhere else.
    data = np.zeros(maximum_class) + 0.01
    data[x - 1] = 0.99
    return data


def kfold(k, data, seed=99):
    # Deterministically shuffle, then carve off the first two folds as
    # held-out sets. Returns (train, fold_1, fold_2): the remaining
    # k - 2 folds for training plus two folds of size len(data) // k
    # (e.g. test and validation).
    np.random.seed(seed)
    data = np.random.permutation(data)
    fold_size = len(data) // k
    return data[fold_size*2:], data[:fold_size], data[fold_size:fold_size*2]


@njit('float64[:, ::1](float64[:, ::1], boolean)')
def sigmoid(x, derivative):
    # With derivative=True, x is expected to already be a sigmoid output a,
    # so the derivative simplifies to a * (1 - a).
    if derivative:
        return x * (1.0 - x)
    else:
        return 1.0 / (1.0 + np.exp(-x))


@njit('float64[:, ::1](float64[:, ::1], boolean)')
def relu(x, derivative):
    if derivative:
        return np.where(x <= 0.0, 0.0, 1.0)
    else:
        return np.maximum(0.0, x)


@njit('float64[:, ::1](float64[:, ::1], boolean)')
def leaky_relu(x, derivative):
    # Leaky ReLU with negative-side slope 0.01; its derivative is the
    # constant 0.01 for x <= 0 and 1 otherwise.
    if derivative:
        return np.where(x <= 0.0, 0.01, 1.0)
    else:
        return np.maximum(0.01 * x, x)


@njit('float64[:, ::1](float64[:, ::1], boolean)')
def softmax(x, derivative):
    # Numerically stable softmax over the whole array (suited to a single
    # sample). The derivative flag is accepted only for interface parity
    # with the other activations; the gradient is presumably combined with
    # the cross-entropy loss elsewhere, where it reduces to output - target.
    e_x = np.exp(x - np.max(x))
    return e_x / e_x.sum()
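

if __name__ == "__main__":
    # A minimal usage sketch on synthetic data (the random arrays below are
    # illustrative assumptions; real data would come from import_from_csv).
    data = np.random.rand(100, 4)

    # k=5 on 100 rows: fold_size = 20, so 60 training rows and two 20-row
    # held-out folds.
    train, fold_a, fold_b = kfold(5, data)
    print(train.shape, fold_a.shape, fold_b.shape)  # (60, 4) (20, 4) (20, 4)

    # Soft one-hot target for class 3 of 10.
    print(class_to_array(10, 3))

    # The @njit signatures require C-contiguous 2-D float64 arrays.
    z = np.ascontiguousarray(np.random.randn(5, 1))
    a = sigmoid(z, False)
    print(sigmoid(a, True))         # a * (1 - a), elementwise
    print(softmax(z, False).sum())  # ~1.0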