fcnInitializeNetwork16s.m
function net = fcnInitializeNetwork16s(net, varargin)
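% Usage sketch (inferred from the options parsed below; the exact call made
% by the surrounding training scripts is not shown in this file):
%
%   net = fcnInitializeNetwork16s(net, 'nh', 512, 'nClass', 150, 'newLr', 1);
%
% NET is assumed to be a dagnn.DagNN that already contains the 'deconv32'
% layer and the 'sum_1_out' variable referenced below.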
opts.rnn = false;    % not used in this function
opts.nh = 512;       % number of hidden channels in the skip classifiers
opts.nClass = 150;   % number of output classes
opts.newLr = 1;      % learning rate passed to the newly added skip layers
opts = vl_argparse(opts, varargin) ;
% nh = 512;
% nClass = 150;
nh = opts.nh;
nClass = opts.nClass;
% opts.newLr = 0.1;
%% Remove the last layer ('deconv32') so it can be re-added as a 2x upsampling stage
net.removeLayer('deconv32') ;
% 4x4 bilinear interpolation filters, one group per class, for 2x upsampling
filters = single(bilinear_u(4, nClass, nClass)) ;
net.addLayer('deconv32', ...
dagnn.ConvTranspose(...
'size', size(filters), ...
'upsample', 2, ...
'crop', 1, ...
'numGroups', nClass, ...
'hasBias', false), ...
'sum_1_out', 'x36', 'deconvf_1') ;
f = net.getParamIndex('deconvf_1') ;
net.params(f).value = filters ;
net.params(f).learningRate = 1 ;
net.params(f).weightDecay = 1 ;
%% Build the skip network
skip_inputs = {'x30', 'x28', 'x26', 'x24'};   % intermediate feature maps of the base network
% skip_inputs = {'x24'};
[net, skip_classifier_out] = skipNetwork(net, skip_inputs, 512, nh, ...
nClass, opts.newLr, 'skip4');
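% skipNetwork is a helper defined elsewhere in this repository; it is assumed
% to attach a small classifier (nh hidden channels, nClass outputs) to each
% variable in skip_inputs and to return the names of the resulting output
% variables in skip_classifier_out.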
% Add summation layer
net.addLayer('sum2', dagnn.Sum(), ['x36', skip_classifier_out], 'x38') ;
% net.addLayer('sum2', DropSum('rate', 0.5), ['x35', skip_classifier_out], 'x38') ;
%% Add the final deconvolution layer
% 16x16 bilinear interpolation filters, one group per class, for 8x upsampling
filters = single(bilinear_u(16, nClass, nClass)) ;
net.addLayer('deconv16', ...
dagnn.ConvTranspose(...
'size', size(filters), ...
'upsample', 8, ...
'crop', 4, ...
'numGroups', nClass, ...
'hasBias', false), ...
'x38', 'prediction', 'deconvf') ;
f = net.getParamIndex('deconvf') ;
net.params(f).value = filters ;
net.params(f).learningRate = 1 ;
net.params(f).weightDecay = 1 ;
% Make sure the output of the bilinear interpolator is not discarded, for
% visualization purposes
net.vars(net.getVarIndex('prediction')).precious = 1 ;