SpatialClassifier.lua — forked from clementfarabet/lua---nnx (113 lines, 96 loc, 3.01 KB)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
local Classifier, parent = torch.class('nn.SpatialClassifier', 'nn.Module')

--- Wraps a dense (vector-in, vector-out) classifier so it can be applied
-- independently at every spatial location of a KxHxW feature map.
-- @param classifier an nn module mapping K-dim vectors to N-dim outputs
--                   (defaults to an empty nn.Sequential)
function Classifier:__init(classifier)
   parent.__init(self)
   -- public state:
   self.classifier = classifier or nn.Sequential()
   self.spatialOutput = true
   -- private scratch tensors, reused across calls to avoid reallocation:
   for _, field in ipairs{
      'inputF', 'inputT', 'outputF', 'output',
      'gradOutputF', 'gradOutputT', 'gradInputF', 'gradInput'
   } do
      self[field] = torch.Tensor()
   end
   -- compat with container-style introspection:
   self.modules = {self.classifier}
end
--- Append a module to the wrapped classifier.
-- @param module an nn module to add
function Classifier:add(module)
   local net = self.classifier
   net:add(module)
end
--- Classify every spatial location of a 3D feature map.
-- @param input KxHxW tensor (K features per spatial location)
-- @return NxHxW tensor of per-location classifier outputs when
--         self.spatialOutput is true, otherwise the flat (H*W)xN output
function Classifier:forward(input)
   -- get dims:
   if input:nDimension() ~= 3 then
      error('<nn.SpatialClassifier> input should be 3D: KxHxW')
   end
   local K = input:size(1)
   local H = input:size(2)
   local W = input:size(3)
   local HW = H*W
   -- :set() shares the input's storage and :resize() reinterprets it as
   -- flat K x (H*W); that is only valid for contiguous tensors, so force
   -- contiguity first (no-op / no copy when input is already contiguous):
   input = input:contiguous()
   -- flatten then transpose input to (H*W) x K, one row per location:
   self.inputF:set(input):resize(K, HW)
   self.inputT:resize(HW, K):copy(self.inputF:t())
   -- classify all locations in one batched call:
   self.outputT = self.classifier:forward(self.inputT)
   if self.spatialOutput then
      -- transpose output back to spatial layout N x H x W:
      local N = self.outputT:size(2)
      self.outputF:resize(N, HW):copy(self.outputT:t())
      self.output:set(self.outputF):resize(N, H, W)
   else
      -- leave output flat:
      self.output = self.outputT
   end
   return self.output
end
--- Backpropagate through the spatial classifier.
-- @param input KxHxW tensor, same as passed to forward
-- @param gradOutput NxHxW tensor when self.spatialOutput, else (H*W)xN
-- @return KxHxW gradient w.r.t. the input
function Classifier:backward(input, gradOutput)
   -- get dims (validate like forward does, for a clear error message):
   if input:nDimension() ~= 3 then
      error('<nn.SpatialClassifier> input should be 3D: KxHxW')
   end
   local K = input:size(1)
   local H = input:size(2)
   local W = input:size(3)
   local HW = H*W
   local N = gradOutput:size(1)
   -- :set()+:resize() shares storage and only works on contiguous tensors:
   input = input:contiguous()
   -- transpose input to (H*W) x K
   self.inputF:set(input):resize(K, HW)
   self.inputT:resize(HW, K):copy(self.inputF:t())
   if self.spatialOutput then
      -- transpose gradOutput to (H*W) x N (contiguity needed for :set())
      gradOutput = gradOutput:contiguous()
      self.gradOutputF:set(gradOutput):resize(N, HW)
      self.gradOutputT:resize(HW, N):copy(self.gradOutputF:t())
   else
      self.gradOutputT = gradOutput
   end
   -- backward through classifier:
   self.gradInputT = self.classifier:backward(self.inputT, self.gradOutputT)
   -- transpose gradInput back to spatial layout K x H x W
   self.gradInputF:resize(K, HW):copy(self.gradInputT:t())
   self.gradInput:set(self.gradInputF):resize(K, H, W)
   return self.gradInput
end
--- Accumulate parameter gradients of the wrapped classifier.
-- @param input KxHxW tensor, same as passed to forward
-- @param gradOutput NxHxW tensor when self.spatialOutput, else (H*W)xN
-- @param scale optional scaling factor forwarded to the classifier
function Classifier:accGradParameters(input, gradOutput, scale)
   -- get dims (validate like forward does, for a clear error message):
   if input:nDimension() ~= 3 then
      error('<nn.SpatialClassifier> input should be 3D: KxHxW')
   end
   local K = input:size(1)
   local H = input:size(2)
   local W = input:size(3)
   local HW = H*W
   local N = gradOutput:size(1)
   -- :set()+:resize() shares storage and only works on contiguous tensors:
   input = input:contiguous()
   -- transpose input to (H*W) x K
   self.inputF:set(input):resize(K, HW)
   self.inputT:resize(HW, K):copy(self.inputF:t())
   if self.spatialOutput then
      -- transpose gradOutput to (H*W) x N (contiguity needed for :set())
      gradOutput = gradOutput:contiguous()
      self.gradOutputF:set(gradOutput):resize(N, HW)
      self.gradOutputT:resize(HW, N):copy(self.gradOutputF:t())
   else
      self.gradOutputT = gradOutput
   end
   -- accumulate gradients in the wrapped classifier:
   self.classifier:accGradParameters(self.inputT, self.gradOutputT, scale)
end
--- Reset accumulated parameter gradients of the wrapped classifier.
function Classifier:zeroGradParameters()
   local net = self.classifier
   net:zeroGradParameters()
end
--- Apply a gradient-descent step on the wrapped classifier's parameters.
-- @param learningRate step size forwarded to the classifier
function Classifier:updateParameters(learningRate)
   local net = self.classifier
   net:updateParameters(learningRate)
end