SpatialLPPooling.lua (forked from torch/nn)
local SpatialLPPooling, parent = torch.class('nn.SpatialLPPooling', 'nn.Sequential')
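-- Lp pooling over kW x kH spatial windows with strides dW x dH: each output
-- value is (sum over the window of x^pnorm)^(1/pnorm), built by chaining
-- element-wise Power, average pooling, and a rescaling constant. The common
-- case pnorm == 2 uses the dedicated Square/Sqrt modules instead.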
function SpatialLPPooling:__init(nInputPlane, pnorm, kW, kH, dW, dH)
   parent.__init(self)

   -- strides default to the kernel size (non-overlapping windows)
   dW = dW or kW
   dH = dH or kH

   self.kW = kW
   self.kH = kH
   self.dW = dW
   self.dH = dH

   if pnorm == 2 then
      self:add(nn.Square())
   else
      self:add(nn.Power(pnorm))
   end
   -- average pooling followed by MulConstant(kW*kH) turns the window average
   -- back into a window sum
   self:add(nn.SpatialAveragePooling(kW, kH, dW, dH))
   self:add(nn.MulConstant(kW*kH))
   if pnorm == 2 then
      self:add(nn.Sqrt())
   else
      self:add(nn.Power(1/pnorm))
   end
end
-- the module is a Sequential: by default, it'll try to learn the parameters
-- of the sub-sampler; we avoid that by redefining its parameter-related
-- methods as no-ops.
function SpatialLPPooling:reset()
end

function SpatialLPPooling:accGradParameters()
end

function SpatialLPPooling:accUpdateGradParameters()
end

function SpatialLPPooling:zeroGradParameters()
end

function SpatialLPPooling:updateParameters()
end
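
A minimal usage sketch follows; the plane count, kernel size, and tensor shapes are illustrative, assuming the standard torch and nn packages are available.

require 'nn'

-- L2 pooling of 16 feature planes over 2x2 windows with stride 2
local pool = nn.SpatialLPPooling(16, 2, 2, 2, 2, 2)

local input = torch.randn(16, 8, 8)  -- nInputPlane x height x width
local output = pool:forward(input)   -- 16 x 4 x 4
-- each output value is the L2 norm of the corresponding 2x2 input window
print(output:size())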