-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path Layer.py
55 lines (42 loc) · 2.15 KB
/
Layer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import numpy as np
import Neuron
'''The layer class holds the parameters required by a layer within the network, including the weight matrices,
the centroids for an RBF network, and the error values and derivatives used in backpropagation. Additionally,
the layer class is used to propagate inputs through the network.'''
class layer:
    '''One layer of the network.

    Stores the layer's outgoing weight matrix, RBF centroids, delta values,
    and activation derivatives used during backpropagation, and forwards
    inputs through the layer via its neuron object.'''

    def __init__(self, weight_size, activation_function, input_layer = False, output_layer = False, in_sigma=None, k_means=None):
        # Role flags and configuration for this layer.
        self.input_layer = input_layer
        self.output_layer = output_layer
        self.weight_size = weight_size  # shape of the outgoing weight matrix
        self.k_means = k_means  # RBF centroids (None when not an RBF layer)
        # Output/input vectors for the layer.
        self.outputs = np.zeros(weight_size[0])
        self.inputs = None
        # The neuron object performs the activation-function computations.
        self.neuron = Neuron.neuron(activation_function, sigma=in_sigma)
        # Default every optional array to "absent"; filled in below per role.
        self.weights = None
        self.delta_values = None
        self.derivatives = None
        # Any layer except the input layer receives inputs and tracks deltas.
        if not input_layer:
            self.inputs = np.zeros(weight_size[0])
            self.delta_values = np.zeros(weight_size[0])
        # Any layer except the output layer carries an outgoing weight matrix,
        # initialized uniformly in [-0.2, 0.2).
        if not output_layer:
            self.weights = np.random.uniform(-0.2, 0.2, size=weight_size)
        # Only hidden layers cache activation derivatives for backprop.
        if not input_layer and not output_layer:
            self.derivatives = np.zeros(weight_size[0])

    def calculate_output(self):
        '''Propagate this layer's signal forward.

        Returns the net input to the next layer for input/hidden layers,
        or the activated outputs for the output layer. Hidden layers also
        append a bias activation and cache their derivatives.'''
        # Input layer: no activation, just project the raw outputs through
        # the weight matrix.
        if self.input_layer:
            return self.outputs.dot(self.weights)
        # Run the incoming values through the activation function.
        self.outputs = self.neuron.calculate_output(i_inputs=self.inputs, in_Kvectors=self.k_means)
        # Output layer: the activated values ARE the network output.
        if self.output_layer:
            return self.outputs
        # Hidden layer: append 1 as the bias activation, cache derivatives,
        # then project through the outgoing weights.
        # NOTE(review): the derivative call omits in_Kvectors — confirm this
        # is intended for RBF layers.
        self.outputs = np.append(self.outputs, 1)
        self.derivatives = self.neuron.calculate_output(i_inputs=self.inputs, i_want_derivative=True)
        return self.outputs.dot(self.weights)

    def set_delta(self, in_delta):
        '''Store the delta vector backpropagated into this layer.'''
        self.delta_values = in_delta