LeakyReLU.lua 1.87 KB
Newer Older
Benjamin Thomas Graham's avatar
Benjamin Thomas Graham committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
-- Copyright 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the license found in the
-- LICENSE file in the root directory of this source tree.

return function(sparseconvnet)
  local C = sparseconvnet.C
  local math = require 'math'
  local LeakyReLU, parent = torch.class(
    'sparseconvnet.LeakyReLU', 'nn.Module', sparseconvnet)

  function LeakyReLU:__init(leakage,ip)
    -- leakage: multiplier applied to negative activations (no default here;
    --          the caller must supply it).
    -- ip: in-place flag; any non-boolean (including nil) defaults to true.
    parent.__init(self)
    if type(ip) == 'boolean' then
      self.inplace = ip
    else
      self.inplace = true -- default to inplace
    end
    self.leakage = leakage

    -- NOTE(review): the placeholders below key off the raw `ip` argument
    -- rather than the resolved self.inplace flag, so the default case
    -- (ip == nil, inplace == true) still allocates tensors — confirm this
    -- asymmetry is intended.
    if ip then
      self.output = {features = "Recycle input.features"}
      self.gradInput = {features = "Recycle gradOutput.features"}
    else
      self.output = {features = torch.Tensor()}
      self.gradInput = {features = torch.Tensor()}
    end
  end

  function LeakyReLU:updateOutput(input)
    -- Forward pass over the sparse tensor's feature matrix:
    -- out = x for x > 0, leakage * x otherwise (elementwise in the C kernel).
    self.output.metadata = input.metadata
    self.output.spatialSize = input.spatialSize
    -- Bug fix: self.output.features must be a real tensor before :cdata()
    -- is called on it.  In in-place mode it may still be the placeholder
    -- string set by __init/clearState, so recycle the input's feature
    -- tensor; otherwise size our private output tensor to match the input.
    -- (Mirrors the handling already present in updateGradInput.)
    if self.inplace then
      self.output.features = input.features
    else
      self.output.features:resizeAs(input.features)
    end
    C.typedFn(self._type,'LeakyReLU_updateOutput')(
      input.features:cdata(),
      self.output.features:cdata(),
      self.leakage)
    return self.output
  end

  function LeakyReLU:updateGradInput(input, gradOutput)
    -- Backward pass: dL/dx = dL/dy where x > 0, leakage * dL/dy elsewhere.
    if not self.inplace then
      -- Private gradient buffer: size it to match the incoming gradient.
      self.gradInput.features:resizeAs(gradOutput.features)
    else
      -- In-place: recycle the incoming gradient tensor instead of allocating.
      self.gradInput.features = gradOutput.features
    end
    C.typedFn(self._type,'LeakyReLU_updateGradInput')(
      input.features:cdata(),
      self.gradInput.features:cdata(),
      gradOutput.features:cdata(),
      self.leakage)
    return self.gradInput
  end

  function LeakyReLU:__tostring()
    -- Human-readable summary, e.g. "LeakyReLU(0.333)".
    return 'LeakyReLU(' .. self.leakage .. ')'
  end

  function LeakyReLU:clearState()
    -- Drop cached state (metadata/spatialSize and buffered tensors) so the
    -- module serializes compactly.  In-place modules revert to their
    -- placeholder strings; otherwise the existing tensors are emptied in
    -- place via :set(), which returns the tensor itself.
    local out, grad
    if self.inplace then
      out = "Recycle input.features"
      grad = "Recycle gradOutput.features"
    else
      out = self.output.features:set()
      grad = self.gradInput.features:set()
    end
    self.output = {features = out}
    self.gradInput = {features = grad}
  end

  -- Pointwise op: the spatial size is unchanged, so the suggested input
  -- size equals the requested output size.
  function LeakyReLU:suggestInputSize(nOut)
    return nOut
  end
end