OutputLayer.m
classdef OutputLayer < WeightedLayer
    %OUTPUTLAYER Final layer of the network, extending WeightedLayer.
    %   Stores the target values y and starts backpropagation by computing
    %   the output-layer gradients dZ, dW, and db.

    properties
        y   % target values, one column per training example
    end

    methods
        function output = OutputLayer(units, options)
            %OUTPUTLAYER Construct an output layer with the given number of
            %   units and an options struct providing activation, usebias,
            %   and kernelinitializer.
            output.units = units;
            output.activation = options.activation;
            output.usebias = options.usebias;
            output.kernelinitializer = options.kernelinitializer;
            output.options = options;
        end

        function y = forward(output, X, cache)
            % Delegate the forward pass to WeightedLayer.
            y = forward@WeightedLayer(output, X, cache);
        end

        function backward(output, m, lambd)
            assert(m == size(output.y, 2), ...
                'Number of examples m does not match the number of targets in y.');
            if strcmp(output.activation, 'sigmoid') ...
                    || strcmp(output.activation, 'softmax') ...
                    || strcmp(output.activation, 'linear') ...
                    || strcmp(output.activation, 'tanh') ...
                    || strcmp(output.activation, 'leeoscillator')
                % Gradient of the loss w.r.t. the pre-activation, using the
                % simplification dZ = A - y (exact for sigmoid/softmax with
                % cross-entropy and for linear with mean squared error).
                output.dZ = output.A - output.y;
                output.dW = (1/m) .* (output.dZ * output.prevlayer.A');
                output.db = (1/m) .* sum(output.dZ, 2);
            else
                throw(MException('OutputLayer:notImplemented', ...
                    'Backpropagation is not implemented for this activation function.'));
            end
            % Continue backpropagation through the preceding layers.
            output.prevlayer.backward(m, lambd);
        end

        function newlayer = copy(layer)
            % Create a new OutputLayer with the same configuration and
            % transfer this layer's state into it.
            newlayer = OutputLayer(layer.units, layer.options);
            layer.move(newlayer);
        end

        function newlayer = move(layer, newlayer)
            % Move the base-class state, then the stored targets.
            move@WeightedLayer(layer, newlayer);
            newlayer.y = layer.y;
        end
    end
end
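
% For context, a minimal usage sketch. The options fields match those read
% by the constructor above; the field values, the 'glorot' initializer name,
% and the surrounding training loop are assumptions, not part of this file.
%
%   options = struct( ...
%       'activation', 'softmax', ...        % one of the supported activations
%       'usebias', true, ...
%       'kernelinitializer', 'glorot');     % hypothetical initializer name
%
%   layer = OutputLayer(10, options);       % e.g. 10 output classes
%   layer.y = Y;                            % targets, one column per example
%
%   % After a forward pass through the network, backpropagation starts at
%   % the output layer (m = number of examples, lambd = L2 penalty) and
%   % propagates to earlier layers via prevlayer.backward:
%   layer.backward(m, lambd);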